VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103005

最後變更：此檔案自 103005 以來的最後變更為 102977，由 vboxsync 於 14 個月前提交

VMM/IEM: Implemented generic fallback for misaligned x86 locking that is not compatible with the host. Using the existing split-lock solution with VINF_EM_EMULATE_SPLIT_LOCK from bugref:10052. We keep ignoring the 'lock' prefix in the recompiler for single CPU VMs (now also on amd64 hosts). bugref:10547

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Author Date Id Revision
檔案大小: 534.5 KB
 
1/* $Id: IEMAllInstOneByte.cpp.h 102977 2024-01-19 23:11:30Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.alldomusa.eu.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * @param   a_fnNormalU8    Assembly worker for the plain (non-locked) case.
 * @param   a_fnLockedU8    Assembly worker used when a LOCK prefix demands an
 *                          atomic read-modify-write of the memory operand.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCKed variant: map the operand for an atomic update and call the locked worker. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
136
/**
 * Body for instructions like TEST &amp; CMP, ++ with a byte memory/registers as
 * operands.
 *
 * The memory operand is only read, so a LOCK prefix raises \#UD.
 *
 * @param   a_fnNormalU8    Assembly worker for the operation.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,          0); \
            IEM_MC_ARG(uint8_t,         u8Src,           1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK is not allowed on instructions that don't write their memory operand. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
198
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Since the destination is always a register, a LOCK prefix is invalid and
 * rejected in both encodings.
 *
 * @param   a_fnNormalU8    Assembly worker for the operation.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
247
248
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note! This macro deliberately ends inside an open 'else' block: it MUST be
 *       immediately followed by IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies
 *       the locked (atomic) memory variant and the closing braces.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Locked (atomic) memory-destination continuation of IEMOP_BODY_BINARY_rm_rv_RW;
 * closes the braces left open by that macro.
 *
 * @param   a_fnLockedU16   Locked assembly worker for the 16-bit operand size.
 * @param   a_fnLockedU32   Locked assembly worker for the 32-bit operand size.
 * @param   a_fnLockedU64   Locked assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
467
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * The memory operand is only read, so a LOCK prefix raises \#UD.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,         0); \
                    IEM_MC_ARG(uint16_t,         u16Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,         0); \
                    IEM_MC_ARG(uint32_t,         u32Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,         0); \
                    IEM_MC_ARG(uint64_t,         u64Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK is not allowed on instructions that don't write their memory operand. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
617
618
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   a_fnNormalU8    Assembly worker for the operation.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
638
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   a_fnNormalU16       Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32       Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64       Assembly worker for the 64-bit operand size
 *                              (immediate is sign-extended from 32 bits).
 * @param   a_fModifiesDstReg   Non-zero when the destination register is
 *                              written (so the high dword must be cleared in
 *                              32-bit mode); zero for CMP/TEST style ops.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
710/**
711 * @opcode 0x00
712 * @opmnemonic add
713 * @op1 rm:Eb
714 * @op2 reg:Gb
715 * @opmaps one
716 * @openc ModR/M
717 * @opflmodify cf,pf,af,zf,sf,of
718 * @ophints harmless ignores_op_sizes
719 * @opstats add_Eb_Gb
720 * @opgroup og_gen_arith_bin
721 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
722 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
723 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
724 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
725 */
726FNIEMOP_DEF(iemOp_add_Eb_Gb)
727{
728 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
729 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
730}
731
732
733/**
734 * @opcode 0x01
735 * @opgroup og_gen_arith_bin
736 * @opflmodify cf,pf,af,zf,sf,of
737 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
738 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
739 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
740 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
741 */
742FNIEMOP_DEF(iemOp_add_Ev_Gv)
743{
744 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
745 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
746 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
747}
748
749
750/**
751 * @opcode 0x02
752 * @opgroup og_gen_arith_bin
753 * @opflmodify cf,pf,af,zf,sf,of
754 * @opcopytests iemOp_add_Eb_Gb
755 */
756FNIEMOP_DEF(iemOp_add_Gb_Eb)
757{
758 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
759 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
760}
761
762
763/**
764 * @opcode 0x03
765 * @opgroup og_gen_arith_bin
766 * @opflmodify cf,pf,af,zf,sf,of
767 * @opcopytests iemOp_add_Ev_Gv
768 */
769FNIEMOP_DEF(iemOp_add_Gv_Ev)
770{
771 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
772 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
773}
774
775
776/**
777 * @opcode 0x04
778 * @opgroup og_gen_arith_bin
779 * @opflmodify cf,pf,af,zf,sf,of
780 * @opcopytests iemOp_add_Eb_Gb
781 */
782FNIEMOP_DEF(iemOp_add_Al_Ib)
783{
784 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
785 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
786}
787
788
789/**
790 * @opcode 0x05
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
794 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
795 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
796 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
797 */
798FNIEMOP_DEF(iemOp_add_eAX_Iz)
799{
800 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
801 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
802}
803
804
805/**
806 * @opcode 0x06
807 * @opgroup og_stack_sreg
808 */
809FNIEMOP_DEF(iemOp_push_ES)
810{
811 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
812 IEMOP_HLP_NO_64BIT();
813 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
814}
815
816
817/**
818 * @opcode 0x07
819 * @opgroup og_stack_sreg
820 */
821FNIEMOP_DEF(iemOp_pop_ES)
822{
823 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
824 IEMOP_HLP_NO_64BIT();
825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
826 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
827 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
828 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
829 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
830 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
831 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
832 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
833}
834
835
836/**
837 * @opcode 0x08
838 * @opgroup og_gen_arith_bin
839 * @opflmodify cf,pf,af,zf,sf,of
840 * @opflundef af
841 * @opflclear of,cf
842 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
843 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
844 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
845 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
846 */
847FNIEMOP_DEF(iemOp_or_Eb_Gb)
848{
849 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
850 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
851 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
852}
853
854
855/*
856 * @opcode 0x09
857 * @opgroup og_gen_arith_bin
858 * @opflmodify cf,pf,af,zf,sf,of
859 * @opflundef af
860 * @opflclear of,cf
861 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
862 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
863 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
864 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
865 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
866 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
867 */
868FNIEMOP_DEF(iemOp_or_Ev_Gv)
869{
870 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
871 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
872 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
873 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
874}
875
876
877/**
878 * @opcode 0x0a
879 * @opgroup og_gen_arith_bin
880 * @opflmodify cf,pf,af,zf,sf,of
881 * @opflundef af
882 * @opflclear of,cf
883 * @opcopytests iemOp_or_Eb_Gb
884 */
885FNIEMOP_DEF(iemOp_or_Gb_Eb)
886{
887 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
888 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
889 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
890}
891
892
893/**
894 * @opcode 0x0b
895 * @opgroup og_gen_arith_bin
896 * @opflmodify cf,pf,af,zf,sf,of
897 * @opflundef af
898 * @opflclear of,cf
899 * @opcopytests iemOp_or_Ev_Gv
900 */
901FNIEMOP_DEF(iemOp_or_Gv_Ev)
902{
903 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
904 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
905 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
906}
907
908
909/**
910 * @opcode 0x0c
911 * @opgroup og_gen_arith_bin
912 * @opflmodify cf,pf,af,zf,sf,of
913 * @opflundef af
914 * @opflclear of,cf
915 * @opcopytests iemOp_or_Eb_Gb
916 */
917FNIEMOP_DEF(iemOp_or_Al_Ib)
918{
919 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
920 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
921 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
922}
923
924
925/**
926 * @opcode 0x0d
927 * @opgroup og_gen_arith_bin
928 * @opflmodify cf,pf,af,zf,sf,of
929 * @opflundef af
930 * @opflclear of,cf
931 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
932 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
933 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
934 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
935 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
936 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
937 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
938 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after OR. */
    /* rAX,Iz body: the effective operand size selects among the 16/32/64-bit impls. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
945
946
947/**
948 * @opcode 0x0e
949 * @opgroup og_stack_sreg
950 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x0e is invalid in 64-bit mode. */
    /* Defer to the shared segment-register push helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
957
958
959/**
960 * @opcode 0x0f
961 * @opmnemonic EscTwo0f
962 * @openc two0f
963 * @opdisenum OP_2B_ESC
964 * @ophints harmless
965 * @opgroup og_escapes
966 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* On 286+ this is the two-byte opcode escape: fetch the next byte and
       dispatch via the two-byte map (4 entries per opcode, by prefix index). */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this instruction.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1004
1005/**
1006 * @opcode 0x10
1007 * @opgroup og_gen_arith_bin
1008 * @opfltest cf
1009 * @opflmodify cf,pf,af,zf,sf,of
1010 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1011 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1012 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1013 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1014 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1015 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Byte r/m,reg body; the second impl is the LOCKed variant for memory destinations. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1021
1022
1023/**
1024 * @opcode 0x11
1025 * @opgroup og_gen_arith_bin
1026 * @opfltest cf
1027 * @opflmodify cf,pf,af,zf,sf,of
1028 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1029 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1030 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1031 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1032 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1033 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* 16/32/64-bit r/m,reg body plus the LOCKed variants for memory destinations. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1040
1041
1042/**
1043 * @opcode 0x12
1044 * @opgroup og_gen_arith_bin
1045 * @opfltest cf
1046 * @opflmodify cf,pf,af,zf,sf,of
1047 * @opcopytests iemOp_adc_Eb_Gb
1048 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination (Gb) - no LOCKed variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1054
1055
1056/**
1057 * @opcode 0x13
1058 * @opgroup og_gen_arith_bin
1059 * @opfltest cf
1060 * @opflmodify cf,pf,af,zf,sf,of
1061 * @opcopytests iemOp_adc_Ev_Gv
1062 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Common Gv,Ev decoder body using the 16/32/64-bit ADC implementations. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1068
1069
1070/**
1071 * @opcode 0x14
1072 * @opgroup og_gen_arith_bin
1073 * @opfltest cf
1074 * @opflmodify cf,pf,af,zf,sf,of
1075 * @opcopytests iemOp_adc_Eb_Gb
1076 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* Fixed AL,Ib encoding - always 8-bit, hence IEMOPHINT_IGNORES_OP_SIZES. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1082
1083
1084/**
1085 * @opcode 0x15
1086 * @opgroup og_gen_arith_bin
1087 * @opfltest cf
1088 * @opflmodify cf,pf,af,zf,sf,of
1089 * @opcopytests iemOp_adc_Ev_Gv
1090 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX,Iz body: the effective operand size selects among the 16/32/64-bit impls. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1096
1097
1098/**
1099 * @opcode 0x16
1100 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x16 is invalid in 64-bit mode. */
    /* Defer to the shared segment-register push helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1107
1108
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 0x17 is invalid in 64-bit mode. */
    /* Defer to the C impl; IEM_CIMPL_F_INHIBIT_SHADOW marks the interrupt-inhibit
       shadow that follows POP SS (cf. DISOPTYPE_INHIBIT_IRQS above).  The mask
       lists everything written: xSP plus all hidden SS register fields. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1128
1129
1130/**
1131 * @opcode 0x18
1132 * @opgroup og_gen_arith_bin
1133 * @opfltest cf
1134 * @opflmodify cf,pf,af,zf,sf,of
1135 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Byte r/m,reg body; the second impl is the LOCKed variant for memory destinations. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1141
1142
1143/**
1144 * @opcode 0x19
1145 * @opgroup og_gen_arith_bin
1146 * @opfltest cf
1147 * @opflmodify cf,pf,af,zf,sf,of
1148 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* 16/32/64-bit r/m,reg body plus the LOCKed variants for memory destinations. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1155
1156
1157/**
1158 * @opcode 0x1a
1159 * @opgroup og_gen_arith_bin
1160 * @opfltest cf
1161 * @opflmodify cf,pf,af,zf,sf,of
1162 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination (Gb) - no LOCKed variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1168
1169
1170/**
1171 * @opcode 0x1b
1172 * @opgroup og_gen_arith_bin
1173 * @opfltest cf
1174 * @opflmodify cf,pf,af,zf,sf,of
1175 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Common Gv,Ev decoder body using the 16/32/64-bit SBB implementations. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1181
1182
1183/**
1184 * @opcode 0x1c
1185 * @opgroup og_gen_arith_bin
1186 * @opfltest cf
1187 * @opflmodify cf,pf,af,zf,sf,of
1188 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* Fixed AL,Ib encoding - always 8-bit, hence IEMOPHINT_IGNORES_OP_SIZES. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1194
1195
1196/**
1197 * @opcode 0x1d
1198 * @opgroup og_gen_arith_bin
1199 * @opfltest cf
1200 * @opflmodify cf,pf,af,zf,sf,of
1201 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX,Iz body: the effective operand size selects among the 16/32/64-bit impls. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1207
1208
1209/**
1210 * @opcode 0x1e
1211 * @opgroup og_stack_sreg
1212 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x1e is invalid in 64-bit mode. */
    /* Defer to the shared segment-register push helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1219
1220
1221/**
1222 * @opcode 0x1f
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 0x1f is invalid in 64-bit mode. */
    /* Defer to the C impl; the mask lists everything written: xSP plus all
       hidden DS register fields (selector, base, limit, attributes). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1238
1239
1240/**
1241 * @opcode 0x20
1242 * @opgroup og_gen_arith_bin
1243 * @opflmodify cf,pf,af,zf,sf,of
1244 * @opflundef af
1245 * @opflclear of,cf
1246 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after AND. */
    /* Byte r/m,reg body; the second impl is the LOCKed variant for memory destinations. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1253
1254
1255/**
1256 * @opcode 0x21
1257 * @opgroup og_gen_arith_bin
1258 * @opflmodify cf,pf,af,zf,sf,of
1259 * @opflundef af
1260 * @opflclear of,cf
1261 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after AND. */
    /* 16/32/64-bit r/m,reg body plus the LOCKed variants for memory destinations. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1269
1270
1271/**
1272 * @opcode 0x22
1273 * @opgroup og_gen_arith_bin
1274 * @opflmodify cf,pf,af,zf,sf,of
1275 * @opflundef af
1276 * @opflclear of,cf
1277 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after AND. */
    /* Register destination (Gb) - no LOCKed variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1284
1285
1286/**
1287 * @opcode 0x23
1288 * @opgroup og_gen_arith_bin
1289 * @opflmodify cf,pf,af,zf,sf,of
1290 * @opflundef af
1291 * @opflclear of,cf
1292 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after AND. */
    /* Common Gv,Ev decoder body using the 16/32/64-bit AND implementations. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1299
1300
1301/**
1302 * @opcode 0x24
1303 * @opgroup og_gen_arith_bin
1304 * @opflmodify cf,pf,af,zf,sf,of
1305 * @opflundef af
1306 * @opflclear of,cf
1307 */
1308FNIEMOP_DEF(iemOp_and_Al_Ib)
1309{
1310 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1311 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1312 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1313}
1314
1315
1316/**
1317 * @opcode 0x25
1318 * @opgroup og_gen_arith_bin
1319 * @opflmodify cf,pf,af,zf,sf,of
1320 * @opflundef af
1321 * @opflclear of,cf
1322 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after AND. */
    /* rAX,Iz body: the effective operand size selects among the 16/32/64-bit impls. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1329
1330
1331/**
1332 * @opcode 0x26
1333 * @opmnemonic SEG
1334 * @op1 ES
1335 * @opgroup og_prefix
1336 * @openc prefix
1337 * @opdisenum OP_SEG
1338 * @ophints harmless
1339 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es"); /* a REX prefix must come last, directly before the opcode */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1349
1350
1351/**
1352 * @opcode 0x27
1353 * @opfltest af,cf
1354 * @opflmodify cf,pf,af,zf,sf,of
1355 * @opflundef of
1356 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* 0x27 is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is documented undefined after DAA. */
    /* Defer BCD adjust to the C implementation; only status flags and rAX are touched. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1365
1366
1367/**
1368 * @opcode 0x28
1369 * @opgroup og_gen_arith_bin
1370 * @opflmodify cf,pf,af,zf,sf,of
1371 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Byte r/m,reg body; the second impl is the LOCKed variant for memory destinations. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1377
1378
1379/**
1380 * @opcode 0x29
1381 * @opgroup og_gen_arith_bin
1382 * @opflmodify cf,pf,af,zf,sf,of
1383 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* 16/32/64-bit r/m,reg body plus the LOCKed variants for memory destinations. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1390
1391
1392/**
1393 * @opcode 0x2a
1394 * @opgroup og_gen_arith_bin
1395 * @opflmodify cf,pf,af,zf,sf,of
1396 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination (Gb) - no LOCKed variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1402
1403
1404/**
1405 * @opcode 0x2b
1406 * @opgroup og_gen_arith_bin
1407 * @opflmodify cf,pf,af,zf,sf,of
1408 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Common Gv,Ev decoder body using the 16/32/64-bit SUB implementations. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1414
1415
1416/**
1417 * @opcode 0x2c
1418 * @opgroup og_gen_arith_bin
1419 * @opflmodify cf,pf,af,zf,sf,of
1420 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* Fixed AL,Ib encoding - always 8-bit, hence IEMOPHINT_IGNORES_OP_SIZES. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1426
1427
1428/**
1429 * @opcode 0x2d
1430 * @opgroup og_gen_arith_bin
1431 * @opflmodify cf,pf,af,zf,sf,of
1432 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX,Iz body: the effective operand size selects among the 16/32/64-bit impls. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1438
1439
1440/**
1441 * @opcode 0x2e
1442 * @opmnemonic SEG
1443 * @op1 CS
1444 * @opgroup og_prefix
1445 * @openc prefix
1446 * @opdisenum OP_SEG
1447 * @ophints harmless
1448 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs"); /* a REX prefix must come last, directly before the opcode */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1458
1459
1460/**
1461 * @opcode 0x2f
1462 * @opfltest af,cf
1463 * @opflmodify cf,pf,af,zf,sf,of
1464 * @opflundef of
1465 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* 0x2f is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is documented undefined after DAS. */
    /* Defer BCD adjust to the C implementation; only status flags and rAX are touched. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1474
1475
1476/**
1477 * @opcode 0x30
1478 * @opgroup og_gen_arith_bin
1479 * @opflmodify cf,pf,af,zf,sf,of
1480 * @opflundef af
1481 * @opflclear of,cf
1482 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after XOR. */
    /* Byte r/m,reg body; the second impl is the LOCKed variant for memory destinations. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1489
1490
1491/**
1492 * @opcode 0x31
1493 * @opgroup og_gen_arith_bin
1494 * @opflmodify cf,pf,af,zf,sf,of
1495 * @opflundef af
1496 * @opflclear of,cf
1497 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after XOR. */
    /* 16/32/64-bit r/m,reg body plus the LOCKed variants for memory destinations. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1505
1506
1507/**
1508 * @opcode 0x32
1509 * @opgroup og_gen_arith_bin
1510 * @opflmodify cf,pf,af,zf,sf,of
1511 * @opflundef af
1512 * @opflclear of,cf
1513 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after XOR. */
    /* Register destination (Gb) - no LOCKed variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1520
1521
1522/**
1523 * @opcode 0x33
1524 * @opgroup og_gen_arith_bin
1525 * @opflmodify cf,pf,af,zf,sf,of
1526 * @opflundef af
1527 * @opflclear of,cf
1528 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after XOR. */
    /* Common Gv,Ev decoder body using the 16/32/64-bit XOR implementations. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1535
1536
1537/**
1538 * @opcode 0x34
1539 * @opgroup og_gen_arith_bin
1540 * @opflmodify cf,pf,af,zf,sf,of
1541 * @opflundef af
1542 * @opflclear of,cf
1543 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* Fixed AL,Ib encoding - always 8-bit, hence IEMOPHINT_IGNORES_OP_SIZES. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is documented undefined after XOR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1550
1551
1552/**
1553 * @opcode 0x35
1554 * @opgroup og_gen_arith_bin
1555 * @opflmodify cf,pf,af,zf,sf,of
1556 * @opflundef af
1557 * @opflclear of,cf
1558 */
1559FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1560{
1561 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1562 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1563 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1564}
1565
1566
1567/**
1568 * @opcode 0x36
1569 * @opmnemonic SEG
1570 * @op1 SS
1571 * @opgroup og_prefix
1572 * @openc prefix
1573 * @opdisenum OP_SEG
1574 * @ophints harmless
1575 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss"); /* a REX prefix must come last, directly before the opcode */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1585
1586
1587/**
1588 * @opcode 0x37
1589 * @opfltest af,cf
1590 * @opflmodify cf,pf,af,zf,sf,of
1591 * @opflundef pf,zf,sf,of
1592 * @opgroup og_gen_arith_dec
1593 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1594 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1595 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1596 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1597 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1598 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1599 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1600 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1601 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1602 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1603 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1604 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1605 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1606 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1607 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1608 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1609 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1611 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1613 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1616 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1617 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1618 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1620 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1621 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1622 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1623 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1624 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* 0x37 is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* only OF is excluded from verification; see @opflundef above */

    /* Defer ASCII adjust to the C implementation; only status flags and rAX are touched. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1634
1635
1636/**
1637 * @opcode 0x38
1638 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* Read-only (RO) body: CMP never writes its destination, so no LOCKed variant. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1644
1645
1646/**
1647 * @opcode 0x39
1648 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* Read-only (RO) body: CMP never writes its destination, so no LOCKed variant. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1654
1655
1656/**
1657 * @opcode 0x3a
1658 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Register "destination" (Gb); CMP only updates EFLAGS. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1664
1665
1666/**
1667 * @opcode 0x3b
1668 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Note the first trailing argument is 0 here (vs. 1 for the writing ops). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1674
1675
1676/**
1677 * @opcode 0x3c
1678 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed AL,Ib encoding; CMP only updates EFLAGS. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1684
1685
1686/**
1687 * @opcode 0x3d
1688 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Note the trailing argument is 0 here (vs. 1 for the writing ops). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1694
1695
1696/**
1697 * @opcode 0x3e
1698 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds"); /* a REX prefix must come last, directly before the opcode */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1708
1709
1710/**
1711 * @opcode 0x3f
1712 * @opfltest af,cf
1713 * @opflmodify cf,pf,af,zf,sf,of
1714 * @opflundef pf,zf,sf,of
1715 * @opgroup og_gen_arith_dec
1716 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1717 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1718 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1719 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1720 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1721 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1722 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1723 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1724 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1725 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1726 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1727 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1731 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1732 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1733 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1734 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1735 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1736 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1737 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1740 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1741 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1745 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1746 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1747 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1749 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1750 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1751 */
1752FNIEMOP_DEF(iemOp_aas)
1753{
1754 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1755 IEMOP_HLP_NO_64BIT();
1756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1757 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1758
1759 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1760}
1761
1762
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * Expands into a 16-bit and a 32-bit IEM_MC block (both flagged
 * IEM_MC_F_NOT_64BIT) that reference the given general register and EFLAGS,
 * call the supplied assembly worker, and advance RIP.  The 32-bit case also
 * clears the high half of the 64-bit register per the usual x86 rules.
 *
 * @param   a_fnNormalU16   16-bit worker (e.g. iemAImpl_inc_u16).
 * @param   a_fnNormalU32   32-bit worker (e.g. iemAImpl_inc_u32).
 * @param   a_iReg          The X86_GREG_xXX register index to operate on.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1798
1799/**
1800 * @opcode 0x40
1801 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Plain REX (no R/X/B/W bits): record it and re-dispatch the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: 16/32-bit INC of eAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1819
1820
1821/**
1822 * @opcode 0x41
1823 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B: extends the ModR/M r/m field (bit 3 added via uRexB). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: 16/32-bit INC of eCX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1842
1843
1844/**
1845 * @opcode 0x42
1846 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X: extends the SIB index field (bit 3 added via uRexIndex). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: 16/32-bit INC of eDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1865
1866
1867
1868/**
1869 * @opcode 0x43
1870 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.XB: extends both the r/m (uRexB) and SIB index (uRexIndex) fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: 16/32-bit INC of eBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1890
1891
1892/**
1893 * @opcode 0x44
1894 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R: extends the ModR/M reg field (bit 3 added via uRexReg). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: 16/32-bit INC of eSP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1913
1914
1915/**
1916 * @opcode 0x45
1917 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RB: extends the ModR/M reg (uRexReg) and r/m (uRexB) fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: 16/32-bit INC of eBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1937
1938
1939/**
1940 * @opcode 0x46
1941 */
1942FNIEMOP_DEF(iemOp_inc_eSI)
1943{
1944 /*
1945 * This is a REX prefix in 64-bit mode.
1946 */
1947 if (IEM_IS_64BIT_CODE(pVCpu))
1948 {
1949 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1950 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1951 pVCpu->iem.s.uRexReg = 1 << 3;
1952 pVCpu->iem.s.uRexIndex = 1 << 3;
1953
1954 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1955 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1956 }
1957
1958 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1959 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
1960}
1961
1962
1963/**
1964 * @opcode 0x47
1965 */
1966FNIEMOP_DEF(iemOp_inc_eDI)
1967{
1968 /*
1969 * This is a REX prefix in 64-bit mode.
1970 */
1971 if (IEM_IS_64BIT_CODE(pVCpu))
1972 {
1973 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1974 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1975 pVCpu->iem.s.uRexReg = 1 << 3;
1976 pVCpu->iem.s.uRexB = 1 << 3;
1977 pVCpu->iem.s.uRexIndex = 1 << 3;
1978
1979 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1980 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1981 }
1982
1983 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1984 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
1985}
1986
1987
1988/**
1989 * @opcode 0x48
1990 */
1991FNIEMOP_DEF(iemOp_dec_eAX)
1992{
1993 /*
1994 * This is a REX prefix in 64-bit mode.
1995 */
1996 if (IEM_IS_64BIT_CODE(pVCpu))
1997 {
1998 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1999 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2000 iemRecalEffOpSize(pVCpu);
2001
2002 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2003 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2004 }
2005
2006 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2007 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2008}
2009
2010
2011/**
2012 * @opcode 0x49
2013 */
2014FNIEMOP_DEF(iemOp_dec_eCX)
2015{
2016 /*
2017 * This is a REX prefix in 64-bit mode.
2018 */
2019 if (IEM_IS_64BIT_CODE(pVCpu))
2020 {
2021 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2022 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2023 pVCpu->iem.s.uRexB = 1 << 3;
2024 iemRecalEffOpSize(pVCpu);
2025
2026 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2027 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2028 }
2029
2030 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2031 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2032}
2033
2034
2035/**
2036 * @opcode 0x4a
2037 */
2038FNIEMOP_DEF(iemOp_dec_eDX)
2039{
2040 /*
2041 * This is a REX prefix in 64-bit mode.
2042 */
2043 if (IEM_IS_64BIT_CODE(pVCpu))
2044 {
2045 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2046 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2047 pVCpu->iem.s.uRexIndex = 1 << 3;
2048 iemRecalEffOpSize(pVCpu);
2049
2050 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2051 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2052 }
2053
2054 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2055 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2056}
2057
2058
2059/**
2060 * @opcode 0x4b
2061 */
2062FNIEMOP_DEF(iemOp_dec_eBX)
2063{
2064 /*
2065 * This is a REX prefix in 64-bit mode.
2066 */
2067 if (IEM_IS_64BIT_CODE(pVCpu))
2068 {
2069 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2070 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2071 pVCpu->iem.s.uRexB = 1 << 3;
2072 pVCpu->iem.s.uRexIndex = 1 << 3;
2073 iemRecalEffOpSize(pVCpu);
2074
2075 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2076 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2077 }
2078
2079 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2080 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2081}
2082
2083
2084/**
2085 * @opcode 0x4c
2086 */
2087FNIEMOP_DEF(iemOp_dec_eSP)
2088{
2089 /*
2090 * This is a REX prefix in 64-bit mode.
2091 */
2092 if (IEM_IS_64BIT_CODE(pVCpu))
2093 {
2094 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2095 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2096 pVCpu->iem.s.uRexReg = 1 << 3;
2097 iemRecalEffOpSize(pVCpu);
2098
2099 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2100 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2101 }
2102
2103 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2104 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2105}
2106
2107
2108/**
2109 * @opcode 0x4d
2110 */
2111FNIEMOP_DEF(iemOp_dec_eBP)
2112{
2113 /*
2114 * This is a REX prefix in 64-bit mode.
2115 */
2116 if (IEM_IS_64BIT_CODE(pVCpu))
2117 {
2118 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2119 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2120 pVCpu->iem.s.uRexReg = 1 << 3;
2121 pVCpu->iem.s.uRexB = 1 << 3;
2122 iemRecalEffOpSize(pVCpu);
2123
2124 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2125 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2126 }
2127
2128 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2129 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2130}
2131
2132
2133/**
2134 * @opcode 0x4e
2135 */
2136FNIEMOP_DEF(iemOp_dec_eSI)
2137{
2138 /*
2139 * This is a REX prefix in 64-bit mode.
2140 */
2141 if (IEM_IS_64BIT_CODE(pVCpu))
2142 {
2143 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2144 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2145 pVCpu->iem.s.uRexReg = 1 << 3;
2146 pVCpu->iem.s.uRexIndex = 1 << 3;
2147 iemRecalEffOpSize(pVCpu);
2148
2149 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2150 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2151 }
2152
2153 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2154 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2155}
2156
2157
2158/**
2159 * @opcode 0x4f
2160 */
2161FNIEMOP_DEF(iemOp_dec_eDI)
2162{
2163 /*
2164 * This is a REX prefix in 64-bit mode.
2165 */
2166 if (IEM_IS_64BIT_CODE(pVCpu))
2167 {
2168 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2169 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2170 pVCpu->iem.s.uRexReg = 1 << 3;
2171 pVCpu->iem.s.uRexB = 1 << 3;
2172 pVCpu->iem.s.uRexIndex = 1 << 3;
2173 iemRecalEffOpSize(pVCpu);
2174
2175 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2176 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2177 }
2178
2179 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2180 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2181}
2182
2183
2184/**
2185 * Common 'push register' helper.
2186 */
2187FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2188{
2189 if (IEM_IS_64BIT_CODE(pVCpu))
2190 {
2191 iReg |= pVCpu->iem.s.uRexB;
2192 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2193 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2194 }
2195
2196 switch (pVCpu->iem.s.enmEffOpSize)
2197 {
2198 case IEMMODE_16BIT:
2199 IEM_MC_BEGIN(0, 1, 0, 0);
2200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2201 IEM_MC_LOCAL(uint16_t, u16Value);
2202 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2203 IEM_MC_PUSH_U16(u16Value);
2204 IEM_MC_ADVANCE_RIP_AND_FINISH();
2205 IEM_MC_END();
2206 break;
2207
2208 case IEMMODE_32BIT:
2209 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2211 IEM_MC_LOCAL(uint32_t, u32Value);
2212 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2213 IEM_MC_PUSH_U32(u32Value);
2214 IEM_MC_ADVANCE_RIP_AND_FINISH();
2215 IEM_MC_END();
2216 break;
2217
2218 case IEMMODE_64BIT:
2219 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2221 IEM_MC_LOCAL(uint64_t, u64Value);
2222 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2223 IEM_MC_PUSH_U64(u64Value);
2224 IEM_MC_ADVANCE_RIP_AND_FINISH();
2225 IEM_MC_END();
2226 break;
2227
2228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2229 }
2230}
2231
2232
2233/**
2234 * @opcode 0x50
2235 */
2236FNIEMOP_DEF(iemOp_push_eAX)
2237{
2238 IEMOP_MNEMONIC(push_rAX, "push rAX");
2239 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2240}
2241
2242
2243/**
2244 * @opcode 0x51
2245 */
2246FNIEMOP_DEF(iemOp_push_eCX)
2247{
2248 IEMOP_MNEMONIC(push_rCX, "push rCX");
2249 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2250}
2251
2252
2253/**
2254 * @opcode 0x52
2255 */
2256FNIEMOP_DEF(iemOp_push_eDX)
2257{
2258 IEMOP_MNEMONIC(push_rDX, "push rDX");
2259 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2260}
2261
2262
2263/**
2264 * @opcode 0x53
2265 */
2266FNIEMOP_DEF(iemOp_push_eBX)
2267{
2268 IEMOP_MNEMONIC(push_rBX, "push rBX");
2269 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2270}
2271
2272
2273/**
2274 * @opcode 0x54
2275 */
2276FNIEMOP_DEF(iemOp_push_eSP)
2277{
2278 IEMOP_MNEMONIC(push_rSP, "push rSP");
2279 if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
2280 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2281
2282 /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
2283 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2285 IEM_MC_LOCAL(uint16_t, u16Value);
2286 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2287 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2288 IEM_MC_PUSH_U16(u16Value);
2289 IEM_MC_ADVANCE_RIP_AND_FINISH();
2290 IEM_MC_END();
2291}
2292
2293
2294/**
2295 * @opcode 0x55
2296 */
2297FNIEMOP_DEF(iemOp_push_eBP)
2298{
2299 IEMOP_MNEMONIC(push_rBP, "push rBP");
2300 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2301}
2302
2303
2304/**
2305 * @opcode 0x56
2306 */
2307FNIEMOP_DEF(iemOp_push_eSI)
2308{
2309 IEMOP_MNEMONIC(push_rSI, "push rSI");
2310 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2311}
2312
2313
2314/**
2315 * @opcode 0x57
2316 */
2317FNIEMOP_DEF(iemOp_push_eDI)
2318{
2319 IEMOP_MNEMONIC(push_rDI, "push rDI");
2320 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2321}
2322
2323
2324/**
2325 * Common 'pop register' helper.
2326 */
2327FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2328{
2329 if (IEM_IS_64BIT_CODE(pVCpu))
2330 {
2331 iReg |= pVCpu->iem.s.uRexB;
2332 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2333 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2334 }
2335
2336 switch (pVCpu->iem.s.enmEffOpSize)
2337 {
2338 case IEMMODE_16BIT:
2339 IEM_MC_BEGIN(0, 0, 0, 0);
2340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2341 IEM_MC_POP_GREG_U16(iReg);
2342 IEM_MC_ADVANCE_RIP_AND_FINISH();
2343 IEM_MC_END();
2344 break;
2345
2346 case IEMMODE_32BIT:
2347 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2349 IEM_MC_POP_GREG_U32(iReg);
2350 IEM_MC_ADVANCE_RIP_AND_FINISH();
2351 IEM_MC_END();
2352 break;
2353
2354 case IEMMODE_64BIT:
2355 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2357 IEM_MC_POP_GREG_U64(iReg);
2358 IEM_MC_ADVANCE_RIP_AND_FINISH();
2359 IEM_MC_END();
2360 break;
2361
2362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2363 }
2364}
2365
2366
2367/**
2368 * @opcode 0x58
2369 */
2370FNIEMOP_DEF(iemOp_pop_eAX)
2371{
2372 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2373 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2374}
2375
2376
2377/**
2378 * @opcode 0x59
2379 */
2380FNIEMOP_DEF(iemOp_pop_eCX)
2381{
2382 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2383 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2384}
2385
2386
2387/**
2388 * @opcode 0x5a
2389 */
2390FNIEMOP_DEF(iemOp_pop_eDX)
2391{
2392 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2393 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2394}
2395
2396
2397/**
2398 * @opcode 0x5b
2399 */
2400FNIEMOP_DEF(iemOp_pop_eBX)
2401{
2402 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2403 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2404}
2405
2406
2407/**
2408 * @opcode 0x5c
2409 */
2410FNIEMOP_DEF(iemOp_pop_eSP)
2411{
2412 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2413 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2414}
2415
2416
2417/**
2418 * @opcode 0x5d
2419 */
2420FNIEMOP_DEF(iemOp_pop_eBP)
2421{
2422 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2423 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2424}
2425
2426
2427/**
2428 * @opcode 0x5e
2429 */
2430FNIEMOP_DEF(iemOp_pop_eSI)
2431{
2432 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2433 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2434}
2435
2436
2437/**
2438 * @opcode 0x5f
2439 */
2440FNIEMOP_DEF(iemOp_pop_eDI)
2441{
2442 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2443 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2444}
2445
2446
2447/**
2448 * @opcode 0x60
2449 */
2450FNIEMOP_DEF(iemOp_pusha)
2451{
2452 IEMOP_MNEMONIC(pusha, "pusha");
2453 IEMOP_HLP_MIN_186();
2454 IEMOP_HLP_NO_64BIT();
2455 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2456 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2457 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2458 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2459}
2460
2461
2462/**
2463 * @opcode 0x61
2464 */
2465FNIEMOP_DEF(iemOp_popa__mvex)
2466{
2467 if (!IEM_IS_64BIT_CODE(pVCpu))
2468 {
2469 IEMOP_MNEMONIC(popa, "popa");
2470 IEMOP_HLP_MIN_186();
2471 IEMOP_HLP_NO_64BIT();
2472 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2473 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2474 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2475 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2476 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2477 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2478 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2479 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2480 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2481 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2482 iemCImpl_popa_16);
2483 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2484 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2485 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2486 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2487 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2488 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2489 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2491 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2492 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2493 iemCImpl_popa_32);
2494 }
2495 IEMOP_MNEMONIC(mvex, "mvex");
2496 Log(("mvex prefix is not supported!\n"));
2497 IEMOP_RAISE_INVALID_OPCODE_RET();
2498}
2499
2500
2501/**
2502 * @opcode 0x62
2503 * @opmnemonic bound
2504 * @op1 Gv_RO
2505 * @op2 Ma
2506 * @opmincpu 80186
2507 * @ophints harmless x86_invalid_64
2508 * @optest op1=0 op2=0 ->
2509 * @optest op1=1 op2=0 -> value.xcpt=5
2510 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2511 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2512 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2513 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2514 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2515 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2516 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2517 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2518 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2519 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2520 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2521 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2522 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2523 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2524 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2525 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2526 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2527 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2528 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2529 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2530 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2531 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2532 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2533 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2534 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2535 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2536 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2537 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2538 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2539 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2540 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2541 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2542 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2543 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2544 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2545 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2546 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2547 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2548 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2549 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2550 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2551 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2552 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  Fetch the index register and the
               two bound values from memory, then let the C implementation do the
               range check (and raise #BR on failure). */
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3: possible EVEX prefix in 32-bit code. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then give up -
       EVEX instructions are not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2640
2641
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw: adjust the RPL field of the destination selector to be at least
 * that of the source, setting ZF when an adjustment was made.  Requires
 * protected mode (286+); in 64-bit mode opcode 0x63 is MOVSXD instead. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map read-write, adjust in place, commit. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2690
2691
2692/**
2693 * @opcode 0x63
2694 *
2695 * @note This is a weird one. It works like a regular move instruction if
2696 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2697 * @todo This definitely needs a testcase to verify the odd cases. */
2698FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2699{
2700 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2701
2702 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2704
2705 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2706 {
2707 if (IEM_IS_MODRM_REG_MODE(bRm))
2708 {
2709 /*
2710 * Register to register.
2711 */
2712 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2714 IEM_MC_LOCAL(uint64_t, u64Value);
2715 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2716 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2717 IEM_MC_ADVANCE_RIP_AND_FINISH();
2718 IEM_MC_END();
2719 }
2720 else
2721 {
2722 /*
2723 * We're loading a register from memory.
2724 */
2725 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2726 IEM_MC_LOCAL(uint64_t, u64Value);
2727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2730 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2731 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2732 IEM_MC_ADVANCE_RIP_AND_FINISH();
2733 IEM_MC_END();
2734 }
2735 }
2736 else
2737 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2738}
2739
2740
2741/**
2742 * @opcode 0x64
2743 * @opmnemonic segfs
2744 * @opmincpu 80386
2745 * @opgroup og_prefixes
2746 */
2747FNIEMOP_DEF(iemOp_seg_FS)
2748{
2749 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2750 IEMOP_HLP_MIN_386();
2751
2752 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2753 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2754
2755 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2756 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2757}
2758
2759
2760/**
2761 * @opcode 0x65
2762 * @opmnemonic seggs
2763 * @opmincpu 80386
2764 * @opgroup og_prefixes
2765 */
2766FNIEMOP_DEF(iemOp_seg_GS)
2767{
2768 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2769 IEMOP_HLP_MIN_386();
2770
2771 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2772 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2773
2774 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2775 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2776}
2777
2778
2779/**
2780 * @opcode 0x66
2781 * @opmnemonic opsize
2782 * @openc prefix
2783 * @opmincpu 80386
2784 * @ophints harmless
2785 * @opgroup og_prefixes
2786 */
2787FNIEMOP_DEF(iemOp_op_size)
2788{
2789 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2790 IEMOP_HLP_MIN_386();
2791
2792 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2793 iemRecalEffOpSize(pVCpu);
2794
2795 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2796 when REPZ or REPNZ are present. */
2797 if (pVCpu->iem.s.idxPrefix == 0)
2798 pVCpu->iem.s.idxPrefix = 1;
2799
2800 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2801 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2802}
2803
2804
2805/**
2806 * @opcode 0x67
2807 * @opmnemonic addrsize
2808 * @openc prefix
2809 * @opmincpu 80386
2810 * @ophints harmless
2811 * @opgroup og_prefixes
2812 */
2813FNIEMOP_DEF(iemOp_addr_size)
2814{
2815 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2816 IEMOP_HLP_MIN_386();
2817
2818 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2819 switch (pVCpu->iem.s.enmDefAddrMode)
2820 {
2821 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2822 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2823 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2824 default: AssertFailed();
2825 }
2826
2827 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2828 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2829}
2830
2831
2832/**
2833 * @opcode 0x68
2834 */
2835FNIEMOP_DEF(iemOp_push_Iz)
2836{
2837 IEMOP_MNEMONIC(push_Iz, "push Iz");
2838 IEMOP_HLP_MIN_186();
2839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2840 switch (pVCpu->iem.s.enmEffOpSize)
2841 {
2842 case IEMMODE_16BIT:
2843 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2844 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2846 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
2847 IEM_MC_PUSH_U16(u16Value);
2848 IEM_MC_ADVANCE_RIP_AND_FINISH();
2849 IEM_MC_END();
2850 break;
2851
2852 case IEMMODE_32BIT:
2853 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2854 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2856 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
2857 IEM_MC_PUSH_U32(u32Value);
2858 IEM_MC_ADVANCE_RIP_AND_FINISH();
2859 IEM_MC_END();
2860 break;
2861
2862 case IEMMODE_64BIT:
2863 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2864 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2866 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
2867 IEM_MC_PUSH_U64(u64Value);
2868 IEM_MC_ADVANCE_RIP_AND_FINISH();
2869 IEM_MC_END();
2870 break;
2871
2872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2873 }
2874}
2875
2876
2877/**
2878 * @opcode 0x69
2879 */
2880FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2881{
2882 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2883 IEMOP_HLP_MIN_186();
2884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2885 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2886
2887 switch (pVCpu->iem.s.enmEffOpSize)
2888 {
2889 case IEMMODE_16BIT:
2890 {
2891 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2892 if (IEM_IS_MODRM_REG_MODE(bRm))
2893 {
2894 /* register operand */
2895 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2896 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2898 IEM_MC_LOCAL(uint16_t, u16Tmp);
2899 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2900 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2901 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
2902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2903 IEM_MC_REF_EFLAGS(pEFlags);
2904 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2905 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2906
2907 IEM_MC_ADVANCE_RIP_AND_FINISH();
2908 IEM_MC_END();
2909 }
2910 else
2911 {
2912 /* memory operand */
2913 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2916
2917 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2919
2920 IEM_MC_LOCAL(uint16_t, u16Tmp);
2921 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2922
2923 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2924 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2925 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2926 IEM_MC_REF_EFLAGS(pEFlags);
2927 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2928 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2929
2930 IEM_MC_ADVANCE_RIP_AND_FINISH();
2931 IEM_MC_END();
2932 }
2933 break;
2934 }
2935
2936 case IEMMODE_32BIT:
2937 {
2938 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2939 if (IEM_IS_MODRM_REG_MODE(bRm))
2940 {
2941 /* register operand */
2942 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2943 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2945 IEM_MC_LOCAL(uint32_t, u32Tmp);
2946 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2947
2948 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2949 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
2950 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2951 IEM_MC_REF_EFLAGS(pEFlags);
2952 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2953 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2954
2955 IEM_MC_ADVANCE_RIP_AND_FINISH();
2956 IEM_MC_END();
2957 }
2958 else
2959 {
2960 /* memory operand */
2961 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2964
2965 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2967
2968 IEM_MC_LOCAL(uint32_t, u32Tmp);
2969 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2970
2971 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2972 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
2973 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2974 IEM_MC_REF_EFLAGS(pEFlags);
2975 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2976 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2977
2978 IEM_MC_ADVANCE_RIP_AND_FINISH();
2979 IEM_MC_END();
2980 }
2981 break;
2982 }
2983
2984 case IEMMODE_64BIT:
2985 {
2986 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
2987 if (IEM_IS_MODRM_REG_MODE(bRm))
2988 {
2989 /* register operand */
2990 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2991 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
2992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2993 IEM_MC_LOCAL(uint64_t, u64Tmp);
2994 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2995
2996 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
2997 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
2998 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2999 IEM_MC_REF_EFLAGS(pEFlags);
3000 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3001 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3002
3003 IEM_MC_ADVANCE_RIP_AND_FINISH();
3004 IEM_MC_END();
3005 }
3006 else
3007 {
3008 /* memory operand */
3009 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3012
3013 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3015
3016 IEM_MC_LOCAL(uint64_t, u64Tmp);
3017 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3018
3019 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3020 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3021 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3022 IEM_MC_REF_EFLAGS(pEFlags);
3023 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3024 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3025
3026 IEM_MC_ADVANCE_RIP_AND_FINISH();
3027 IEM_MC_END();
3028 }
3029 break;
3030 }
3031
3032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3033 }
3034}
3035
3036
3037/**
3038 * @opcode 0x6a
3039 */
3040FNIEMOP_DEF(iemOp_push_Ib)
3041{
3042 IEMOP_MNEMONIC(push_Ib, "push Ib");
3043 IEMOP_HLP_MIN_186();
3044 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3045 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3046
3047 switch (pVCpu->iem.s.enmEffOpSize)
3048 {
3049 case IEMMODE_16BIT:
3050 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3052 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3053 IEM_MC_PUSH_U16(uValue);
3054 IEM_MC_ADVANCE_RIP_AND_FINISH();
3055 IEM_MC_END();
3056 break;
3057 case IEMMODE_32BIT:
3058 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3060 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3061 IEM_MC_PUSH_U32(uValue);
3062 IEM_MC_ADVANCE_RIP_AND_FINISH();
3063 IEM_MC_END();
3064 break;
3065 case IEMMODE_64BIT:
3066 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3068 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3069 IEM_MC_PUSH_U64(uValue);
3070 IEM_MC_ADVANCE_RIP_AND_FINISH();
3071 IEM_MC_END();
3072 break;
3073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3074 }
3075}
3076
3077
3078/**
3079 * @opcode 0x6b
3080 */
3081FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3082{
3083 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3084 IEMOP_HLP_MIN_186();
3085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3086 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3087
3088 switch (pVCpu->iem.s.enmEffOpSize)
3089 {
3090 case IEMMODE_16BIT:
3091 {
3092 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3093 if (IEM_IS_MODRM_REG_MODE(bRm))
3094 {
3095 /* register operand */
3096 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3097 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3099
3100 IEM_MC_LOCAL(uint16_t, u16Tmp);
3101 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3102
3103 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3104 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3105 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3106 IEM_MC_REF_EFLAGS(pEFlags);
3107 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3108 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3109
3110 IEM_MC_ADVANCE_RIP_AND_FINISH();
3111 IEM_MC_END();
3112 }
3113 else
3114 {
3115 /* memory operand */
3116 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3117
3118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3120
3121 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3123
3124 IEM_MC_LOCAL(uint16_t, u16Tmp);
3125 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3126
3127 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3128 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3129 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3130 IEM_MC_REF_EFLAGS(pEFlags);
3131 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3132 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3133
3134 IEM_MC_ADVANCE_RIP_AND_FINISH();
3135 IEM_MC_END();
3136 }
3137 break;
3138 }
3139
3140 case IEMMODE_32BIT:
3141 {
3142 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3143 if (IEM_IS_MODRM_REG_MODE(bRm))
3144 {
3145 /* register operand */
3146 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3147 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3149 IEM_MC_LOCAL(uint32_t, u32Tmp);
3150 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3151
3152 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3153 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3154 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3155 IEM_MC_REF_EFLAGS(pEFlags);
3156 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3157 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3158
3159 IEM_MC_ADVANCE_RIP_AND_FINISH();
3160 IEM_MC_END();
3161 }
3162 else
3163 {
3164 /* memory operand */
3165 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3168
3169 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3171
3172 IEM_MC_LOCAL(uint32_t, u32Tmp);
3173 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3174
3175 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3176 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3177 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3178 IEM_MC_REF_EFLAGS(pEFlags);
3179 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3180 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3181
3182 IEM_MC_ADVANCE_RIP_AND_FINISH();
3183 IEM_MC_END();
3184 }
3185 break;
3186 }
3187
3188 case IEMMODE_64BIT:
3189 {
3190 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3191 if (IEM_IS_MODRM_REG_MODE(bRm))
3192 {
3193 /* register operand */
3194 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3195 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3197 IEM_MC_LOCAL(uint64_t, u64Tmp);
3198 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3199
3200 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3201 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3202 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3203 IEM_MC_REF_EFLAGS(pEFlags);
3204 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3205 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3206
3207 IEM_MC_ADVANCE_RIP_AND_FINISH();
3208 IEM_MC_END();
3209 }
3210 else
3211 {
3212 /* memory operand */
3213 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3216
3217 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3219
3220 IEM_MC_LOCAL(uint64_t, u64Tmp);
3221 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3222
3223 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3224 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3225 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3226 IEM_MC_REF_EFLAGS(pEFlags);
3227 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3228 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3229
3230 IEM_MC_ADVANCE_RIP_AND_FINISH();
3231 IEM_MC_END();
3232 }
3233 break;
3234 }
3235
3236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3237 }
3238}
3239
3240
3241/**
3242 * @opcode 0x6c
3243 */
3244FNIEMOP_DEF(iemOp_insb_Yb_DX)
3245{
3246 IEMOP_HLP_MIN_186();
3247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3248 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3249 {
3250 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3251 switch (pVCpu->iem.s.enmEffAddrMode)
3252 {
3253 case IEMMODE_16BIT:
3254 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3255 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3256 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3257 iemCImpl_rep_ins_op8_addr16, false);
3258 case IEMMODE_32BIT:
3259 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3260 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3261 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3262 iemCImpl_rep_ins_op8_addr32, false);
3263 case IEMMODE_64BIT:
3264 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3265 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3266 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3267 iemCImpl_rep_ins_op8_addr64, false);
3268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3269 }
3270 }
3271 else
3272 {
3273 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3274 switch (pVCpu->iem.s.enmEffAddrMode)
3275 {
3276 case IEMMODE_16BIT:
3277 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3278 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3279 iemCImpl_ins_op8_addr16, false);
3280 case IEMMODE_32BIT:
3281 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3282 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3283 iemCImpl_ins_op8_addr32, false);
3284 case IEMMODE_64BIT:
3285 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3286 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3287 iemCImpl_ins_op8_addr64, false);
3288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3289 }
3290 }
3291}
3292
3293
3294/**
3295 * @opcode 0x6d
3296 */
3297FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3298{
3299 IEMOP_HLP_MIN_186();
3300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3301 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3302 {
3303 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3304 switch (pVCpu->iem.s.enmEffOpSize)
3305 {
3306 case IEMMODE_16BIT:
3307 switch (pVCpu->iem.s.enmEffAddrMode)
3308 {
3309 case IEMMODE_16BIT:
3310 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3311 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3312 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3313 iemCImpl_rep_ins_op16_addr16, false);
3314 case IEMMODE_32BIT:
3315 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3316 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3317 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3318 iemCImpl_rep_ins_op16_addr32, false);
3319 case IEMMODE_64BIT:
3320 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3321 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3322 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3323 iemCImpl_rep_ins_op16_addr64, false);
3324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3325 }
3326 break;
3327 case IEMMODE_64BIT:
3328 case IEMMODE_32BIT:
3329 switch (pVCpu->iem.s.enmEffAddrMode)
3330 {
3331 case IEMMODE_16BIT:
3332 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3333 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3334 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3335 iemCImpl_rep_ins_op32_addr16, false);
3336 case IEMMODE_32BIT:
3337 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3338 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3339 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3340 iemCImpl_rep_ins_op32_addr32, false);
3341 case IEMMODE_64BIT:
3342 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3343 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3344 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3345 iemCImpl_rep_ins_op32_addr64, false);
3346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3347 }
3348 break;
3349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3350 }
3351 }
3352 else
3353 {
3354 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3355 switch (pVCpu->iem.s.enmEffOpSize)
3356 {
3357 case IEMMODE_16BIT:
3358 switch (pVCpu->iem.s.enmEffAddrMode)
3359 {
3360 case IEMMODE_16BIT:
3361 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3362 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3363 iemCImpl_ins_op16_addr16, false);
3364 case IEMMODE_32BIT:
3365 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3366 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3367 iemCImpl_ins_op16_addr32, false);
3368 case IEMMODE_64BIT:
3369 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3370 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3371 iemCImpl_ins_op16_addr64, false);
3372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3373 }
3374 break;
3375 case IEMMODE_64BIT:
3376 case IEMMODE_32BIT:
3377 switch (pVCpu->iem.s.enmEffAddrMode)
3378 {
3379 case IEMMODE_16BIT:
3380 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3381 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3382 iemCImpl_ins_op32_addr16, false);
3383 case IEMMODE_32BIT:
3384 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3385 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3386 iemCImpl_ins_op32_addr32, false);
3387 case IEMMODE_64BIT:
3388 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3389 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3390 iemCImpl_ins_op32_addr64, false);
3391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3392 }
3393 break;
3394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3395 }
3396 }
3397}
3398
3399
3400/**
3401 * @opcode 0x6e
3402 */
3403FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3404{
3405 IEMOP_HLP_MIN_186();
3406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3407 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3408 {
3409 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3410 switch (pVCpu->iem.s.enmEffAddrMode)
3411 {
3412 case IEMMODE_16BIT:
3413 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3414 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3415 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3416 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3417 case IEMMODE_32BIT:
3418 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3419 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3420 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3421 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3422 case IEMMODE_64BIT:
3423 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3424 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3425 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3426 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3428 }
3429 }
3430 else
3431 {
3432 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3433 switch (pVCpu->iem.s.enmEffAddrMode)
3434 {
3435 case IEMMODE_16BIT:
3436 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3437 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3438 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3439 case IEMMODE_32BIT:
3440 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3441 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3442 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3443 case IEMMODE_64BIT:
3444 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3445 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3446 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3448 }
3449 }
3450}
3451
3452
3453/**
3454 * @opcode 0x6f
3455 */
3456FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3457{
3458 IEMOP_HLP_MIN_186();
3459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3460 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3461 {
3462 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3463 switch (pVCpu->iem.s.enmEffOpSize)
3464 {
3465 case IEMMODE_16BIT:
3466 switch (pVCpu->iem.s.enmEffAddrMode)
3467 {
3468 case IEMMODE_16BIT:
3469 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3470 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3471 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3472 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3473 case IEMMODE_32BIT:
3474 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3475 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3476 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3477 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3478 case IEMMODE_64BIT:
3479 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3480 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3481 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3482 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3484 }
3485 break;
3486 case IEMMODE_64BIT:
3487 case IEMMODE_32BIT:
3488 switch (pVCpu->iem.s.enmEffAddrMode)
3489 {
3490 case IEMMODE_16BIT:
3491 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3492 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3493 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3494 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3495 case IEMMODE_32BIT:
3496 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3497 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3498 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3499 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3500 case IEMMODE_64BIT:
3501 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3502 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3503 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3504 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3506 }
3507 break;
3508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3509 }
3510 }
3511 else
3512 {
3513 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3514 switch (pVCpu->iem.s.enmEffOpSize)
3515 {
3516 case IEMMODE_16BIT:
3517 switch (pVCpu->iem.s.enmEffAddrMode)
3518 {
3519 case IEMMODE_16BIT:
3520 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3521 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3522 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3523 case IEMMODE_32BIT:
3524 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3525 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3526 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3527 case IEMMODE_64BIT:
3528 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3529 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3530 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3531 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3532 }
3533 break;
3534 case IEMMODE_64BIT:
3535 case IEMMODE_32BIT:
3536 switch (pVCpu->iem.s.enmEffAddrMode)
3537 {
3538 case IEMMODE_16BIT:
3539 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3540 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3541 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3542 case IEMMODE_32BIT:
3543 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3544 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3545 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3546 case IEMMODE_64BIT:
3547 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3548 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3549 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3551 }
3552 break;
3553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3554 }
3555 }
3556}
3557
3558
3559/**
3560 * @opcode 0x70
3561 */
3562FNIEMOP_DEF(iemOp_jo_Jb)
3563{
3564 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3565 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3566 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3567
3568 IEM_MC_BEGIN(0, 0, 0, 0);
3569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3570 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3571 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3572 } IEM_MC_ELSE() {
3573 IEM_MC_ADVANCE_RIP_AND_FINISH();
3574 } IEM_MC_ENDIF();
3575 IEM_MC_END();
3576}
3577
3578
3579/**
3580 * @opcode 0x71
3581 */
3582FNIEMOP_DEF(iemOp_jno_Jb)
3583{
3584 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3585 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3586 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3587
3588 IEM_MC_BEGIN(0, 0, 0, 0);
3589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3591 IEM_MC_ADVANCE_RIP_AND_FINISH();
3592 } IEM_MC_ELSE() {
3593 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3594 } IEM_MC_ENDIF();
3595 IEM_MC_END();
3596}
3597
3598/**
3599 * @opcode 0x72
3600 */
3601FNIEMOP_DEF(iemOp_jc_Jb)
3602{
3603 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3604 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3605 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3606
3607 IEM_MC_BEGIN(0, 0, 0, 0);
3608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3610 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3611 } IEM_MC_ELSE() {
3612 IEM_MC_ADVANCE_RIP_AND_FINISH();
3613 } IEM_MC_ENDIF();
3614 IEM_MC_END();
3615}
3616
3617
3618/**
3619 * @opcode 0x73
3620 */
3621FNIEMOP_DEF(iemOp_jnc_Jb)
3622{
3623 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3624 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3625 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3626
3627 IEM_MC_BEGIN(0, 0, 0, 0);
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3630 IEM_MC_ADVANCE_RIP_AND_FINISH();
3631 } IEM_MC_ELSE() {
3632 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3633 } IEM_MC_ENDIF();
3634 IEM_MC_END();
3635}
3636
3637
3638/**
3639 * @opcode 0x74
3640 */
3641FNIEMOP_DEF(iemOp_je_Jb)
3642{
3643 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3644 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3645 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3646
3647 IEM_MC_BEGIN(0, 0, 0, 0);
3648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3649 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3650 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3651 } IEM_MC_ELSE() {
3652 IEM_MC_ADVANCE_RIP_AND_FINISH();
3653 } IEM_MC_ENDIF();
3654 IEM_MC_END();
3655}
3656
3657
3658/**
3659 * @opcode 0x75
3660 */
3661FNIEMOP_DEF(iemOp_jne_Jb)
3662{
3663 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3664 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3665 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3666
3667 IEM_MC_BEGIN(0, 0, 0, 0);
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3670 IEM_MC_ADVANCE_RIP_AND_FINISH();
3671 } IEM_MC_ELSE() {
3672 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3673 } IEM_MC_ENDIF();
3674 IEM_MC_END();
3675}
3676
3677
3678/**
3679 * @opcode 0x76
3680 */
3681FNIEMOP_DEF(iemOp_jbe_Jb)
3682{
3683 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3684 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3685 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3686
3687 IEM_MC_BEGIN(0, 0, 0, 0);
3688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3689 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3690 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3691 } IEM_MC_ELSE() {
3692 IEM_MC_ADVANCE_RIP_AND_FINISH();
3693 } IEM_MC_ENDIF();
3694 IEM_MC_END();
3695}
3696
3697
3698/**
3699 * @opcode 0x77
3700 */
3701FNIEMOP_DEF(iemOp_jnbe_Jb)
3702{
3703 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3704 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3705 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3706
3707 IEM_MC_BEGIN(0, 0, 0, 0);
3708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3709 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3710 IEM_MC_ADVANCE_RIP_AND_FINISH();
3711 } IEM_MC_ELSE() {
3712 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3713 } IEM_MC_ENDIF();
3714 IEM_MC_END();
3715}
3716
3717
3718/**
3719 * @opcode 0x78
3720 */
3721FNIEMOP_DEF(iemOp_js_Jb)
3722{
3723 IEMOP_MNEMONIC(js_Jb, "js Jb");
3724 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3725 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3726
3727 IEM_MC_BEGIN(0, 0, 0, 0);
3728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3729 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3730 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3731 } IEM_MC_ELSE() {
3732 IEM_MC_ADVANCE_RIP_AND_FINISH();
3733 } IEM_MC_ENDIF();
3734 IEM_MC_END();
3735}
3736
3737
3738/**
3739 * @opcode 0x79
3740 */
3741FNIEMOP_DEF(iemOp_jns_Jb)
3742{
3743 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3744 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3745 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3746
3747 IEM_MC_BEGIN(0, 0, 0, 0);
3748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3749 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3750 IEM_MC_ADVANCE_RIP_AND_FINISH();
3751 } IEM_MC_ELSE() {
3752 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3753 } IEM_MC_ENDIF();
3754 IEM_MC_END();
3755}
3756
3757
3758/**
3759 * @opcode 0x7a
3760 */
3761FNIEMOP_DEF(iemOp_jp_Jb)
3762{
3763 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3764 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3765 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3766
3767 IEM_MC_BEGIN(0, 0, 0, 0);
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3769 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3770 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3771 } IEM_MC_ELSE() {
3772 IEM_MC_ADVANCE_RIP_AND_FINISH();
3773 } IEM_MC_ENDIF();
3774 IEM_MC_END();
3775}
3776
3777
3778/**
3779 * @opcode 0x7b
3780 */
3781FNIEMOP_DEF(iemOp_jnp_Jb)
3782{
3783 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3784 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3785 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3786
3787 IEM_MC_BEGIN(0, 0, 0, 0);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3790 IEM_MC_ADVANCE_RIP_AND_FINISH();
3791 } IEM_MC_ELSE() {
3792 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3793 } IEM_MC_ENDIF();
3794 IEM_MC_END();
3795}
3796
3797
3798/**
3799 * @opcode 0x7c
3800 */
3801FNIEMOP_DEF(iemOp_jl_Jb)
3802{
3803 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3804 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3805 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3806
3807 IEM_MC_BEGIN(0, 0, 0, 0);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3810 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3811 } IEM_MC_ELSE() {
3812 IEM_MC_ADVANCE_RIP_AND_FINISH();
3813 } IEM_MC_ENDIF();
3814 IEM_MC_END();
3815}
3816
3817
3818/**
3819 * @opcode 0x7d
3820 */
3821FNIEMOP_DEF(iemOp_jnl_Jb)
3822{
3823 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3824 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3826
3827 IEM_MC_BEGIN(0, 0, 0, 0);
3828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3829 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3830 IEM_MC_ADVANCE_RIP_AND_FINISH();
3831 } IEM_MC_ELSE() {
3832 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3833 } IEM_MC_ENDIF();
3834 IEM_MC_END();
3835}
3836
3837
3838/**
3839 * @opcode 0x7e
3840 */
3841FNIEMOP_DEF(iemOp_jle_Jb)
3842{
3843 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3844 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3845 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3846
3847 IEM_MC_BEGIN(0, 0, 0, 0);
3848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3849 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3850 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3851 } IEM_MC_ELSE() {
3852 IEM_MC_ADVANCE_RIP_AND_FINISH();
3853 } IEM_MC_ENDIF();
3854 IEM_MC_END();
3855}
3856
3857
3858/**
3859 * @opcode 0x7f
3860 */
3861FNIEMOP_DEF(iemOp_jnle_Jb)
3862{
3863 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3864 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3865 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3866
3867 IEM_MC_BEGIN(0, 0, 0, 0);
3868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3869 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3870 IEM_MC_ADVANCE_RIP_AND_FINISH();
3871 } IEM_MC_ELSE() {
3872 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3873 } IEM_MC_ENDIF();
3874 IEM_MC_END();
3875}
3876
3877
3878/**
3879 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3880 * iemOp_Grp1_Eb_Ib_80.
3881 */
3882#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3883 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3884 { \
3885 /* register target */ \
3886 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3887 IEM_MC_BEGIN(3, 0, 0, 0); \
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3889 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3890 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3891 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3892 \
3893 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3894 IEM_MC_REF_EFLAGS(pEFlags); \
3895 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3896 \
3897 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3898 IEM_MC_END(); \
3899 } \
3900 else \
3901 { \
3902 /* memory target */ \
3903 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
3904 { \
3905 IEM_MC_BEGIN(3, 3, 0, 0); \
3906 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3907 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3909 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3910 \
3911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3912 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3913 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3914 IEMOP_HLP_DONE_DECODING(); \
3915 \
3916 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3917 IEM_MC_FETCH_EFLAGS(EFlags); \
3918 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3919 \
3920 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
3921 IEM_MC_COMMIT_EFLAGS(EFlags); \
3922 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3923 IEM_MC_END(); \
3924 } \
3925 else \
3926 { \
3927 (void)0
3928
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RW for the lock-prefixed memory target,
 * using an atomic mapping and the locked worker; also supplies the closing
 * braces the RW macro left open.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3952
/* Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW, for operations (CMP) that
   only update EFLAGS and never write the destination byte.  Ends in an open
   'else' which IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK completes by raising \#UD for
   the LOCK prefix. */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            /* Destination is only read, so a read-only mapping suffices. */ \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3999
/* Completes the open 'else' left by IEMOP_BODY_BINARY_Eb_Ib_RO: read-only
   operations do not permit the LOCK prefix, so raise \#UD. */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4006
4007
4008
4009/**
4010 * @opmaps grp1_80,grp1_83
4011 * @opcode /0
4012 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4019
4020
4021/**
4022 * @opmaps grp1_80,grp1_83
4023 * @opcode /1
4024 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4031
4032
4033/**
4034 * @opmaps grp1_80,grp1_83
4035 * @opcode /2
4036 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    /* Carry-in is consumed by the worker via the EFLAGS argument. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4043
4044
4045/**
4046 * @opmaps grp1_80,grp1_83
4047 * @opcode /3
4048 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    /* Borrow-in is consumed by the worker via the EFLAGS argument. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4055
4056
4057/**
4058 * @opmaps grp1_80,grp1_83
4059 * @opcode /4
4060 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4067
4068
4069/**
4070 * @opmaps grp1_80,grp1_83
4071 * @opcode /5
4072 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4079
4080
4081/**
4082 * @opmaps grp1_80,grp1_83
4083 * @opcode /6
4084 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4091
4092
4093/**
4094 * @opmaps grp1_80,grp1_83
4095 * @opcode /7
4096 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    /* CMP only writes EFLAGS: read-only destination mapping, LOCK raises \#UD. */
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4103
4104
4105/**
4106 * @opcode 0x80
4107 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of the ModR/M byte selects the group 1 operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4124
4125
/**
 * Body for a group 1 binary operator (opcode 0x81, /0 thru /6) with a
 * word/dword/qword destination and a word/dword immediate; in 64-bit mode
 * the 32-bit immediate is sign-extended to 64 bits.
 *
 * Handles register targets and unlocked memory targets, then ends in an open
 * 'else' which IEMOP_BODY_BINARY_Ev_Iz_LOCKED completes for LOCK-prefixed
 * memory targets.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Writing a 32-bit register zeroes the high half in 64-bit mode. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* 3rd param: remaining opcode bytes (the immediate size). */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* Still 4 immediate bytes: the qword source is a sign-extended dword. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py.
   It completes the open 'else' left by IEMOP_BODY_BINARY_Ev_Iz_RW, handling the
   LOCK-prefixed memory target with an ATOMIC mapping and the locked workers. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4366
/* Read-only version of IEMOP_BODY_BINARY_Ev_Iz_RW, for CMP (only EFLAGS are
   written).  Self-contained: its own trailing 'else' raises \#UD on LOCK, so
   no companion macro is needed. */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                /* No IEM_MC_CLEAR_HIGH_GREG_U64 here: the destination is not written. */ \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Read-only operations never allow the LOCK prefix. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4523
4524
4525/**
4526 * @opmaps grp1_81
4527 * @opcode /0
4528 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4535
4536
4537/**
4538 * @opmaps grp1_81
4539 * @opcode /1
4540 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4547
4548
4549/**
4550 * @opmaps grp1_81
4551 * @opcode /2
4552 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Carry-in is consumed by the workers via the EFLAGS argument. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4559
4560
4561/**
4562 * @opmaps grp1_81
4563 * @opcode /3
4564 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Borrow-in is consumed by the workers via the EFLAGS argument. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4571
4572
4573/**
4574 * @opmaps grp1_81
4575 * @opcode /4
4576 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4583
4584
4585/**
4586 * @opmaps grp1_81
4587 * @opcode /5
4588 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4595
4596
4597/**
4598 * @opmaps grp1_81
4599 * @opcode /6
4600 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* The _RW body ends in an open 'else'; the _LOCKED body completes it. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4607
4608
4609/**
4610 * @opmaps grp1_81
4611 * @opcode /7
4612 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only writes EFLAGS; the _RO body is self-contained and rejects LOCK itself. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4618
4619
4620/**
4621 * @opcode 0x81
4622 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of the ModR/M byte selects the group 1 operation. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4639
4640
4641/**
4642 * @opcode 0x82
4643 * @opmnemonic grp1_82
4644 * @opgroup og_groups
4645 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 aliases 0x80 outside 64-bit mode; in 64-bit mode it is invalid. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4651
4652
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib (opcode 0x83).  The byte immediate is sign-extended to the
 * effective operand size before the operation.  Ends in an open 'else' which
 * IEMOP_BODY_BINARY_Ev_Ib_LOCKED completes for LOCK-prefixed memory targets.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Writing a 32-bit register zeroes the high half in 64-bit mode. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    /* 3rd param: one remaining opcode byte (the immediate). */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Completes the open 'else' left by IEMOP_BODY_BINARY_Ev_Ib_RW, handling the
   LOCK-prefixed memory target with an ATOMIC mapping and the locked workers. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4877
/**
 * Read-only variant of the Ev,Ib binary-op body, for operations (like CMP)
 * that only read the Ev operand and update EFLAGS without writing back.
 *
 * The 8-bit immediate is sign-extended to the effective operand size.  For
 * register operands the immediate is fetched first; for memory operands the
 * effective address is calculated first, then the immediate, after which the
 * memory operand is mapped read-only and unmapped with the _RO variant.
 * A LOCK prefix is rejected (IEMOP_RAISE_INVALID_LOCK_PREFIX_RET) since the
 * destination is never written.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags,               2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags,               2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags,               2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5024
5025/**
5026 * @opmaps grp1_83
5027 * @opcode /0
5028 */
5029FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5030{
5031 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5032 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5033 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5034}
5035
5036
5037/**
5038 * @opmaps grp1_83
5039 * @opcode /1
5040 */
5041FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5042{
5043 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5044 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5045 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5046}
5047
5048
5049/**
5050 * @opmaps grp1_83
5051 * @opcode /2
5052 */
5053FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5054{
5055 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5056 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5057 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5058}
5059
5060
5061/**
5062 * @opmaps grp1_83
5063 * @opcode /3
5064 */
5065FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5066{
5067 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5068 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5069 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5070}
5071
5072
5073/**
5074 * @opmaps grp1_83
5075 * @opcode /4
5076 */
5077FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5078{
5079 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5080 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5081 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5082}
5083
5084
5085/**
5086 * @opmaps grp1_83
5087 * @opcode /5
5088 */
5089FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5090{
5091 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5092 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5093 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5094}
5095
5096
5097/**
5098 * @opmaps grp1_83
5099 * @opcode /6
5100 */
5101FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5102{
5103 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5104 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5105 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5106}
5107
5108
5109/**
5110 * @opmaps grp1_83
5111 * @opcode /7
5112 */
5113FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5114{
5115 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5116 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5117}
5118
5119
5120/**
5121 * @opcode 0x83
5122 */
5123FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5124{
5125 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5126 to the 386 even if absent in the intel reference manuals and some
5127 3rd party opcode listings. */
5128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5129 switch (IEM_GET_MODRM_REG_8(bRm))
5130 {
5131 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5132 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5133 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5134 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5135 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5136 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5137 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5138 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5139 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5140 }
5141}
5142
5143
5144/**
5145 * @opcode 0x84
5146 */
5147FNIEMOP_DEF(iemOp_test_Eb_Gb)
5148{
5149 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5150 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5151 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5152}
5153
5154
5155/**
5156 * @opcode 0x85
5157 */
5158FNIEMOP_DEF(iemOp_test_Ev_Gv)
5159{
5160 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5161 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5162 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5163}
5164
5165
5166/**
5167 * @opcode 0x86
5168 */
5169FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5170{
5171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5172 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5173
5174 /*
5175 * If rm is denoting a register, no more instruction bytes.
5176 */
5177 if (IEM_IS_MODRM_REG_MODE(bRm))
5178 {
5179 IEM_MC_BEGIN(0, 2, 0, 0);
5180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5181 IEM_MC_LOCAL(uint8_t, uTmp1);
5182 IEM_MC_LOCAL(uint8_t, uTmp2);
5183
5184 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5185 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5186 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5187 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5188
5189 IEM_MC_ADVANCE_RIP_AND_FINISH();
5190 IEM_MC_END();
5191 }
5192 else
5193 {
5194 /*
5195 * We're accessing memory.
5196 */
5197#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5198 IEM_MC_BEGIN(2, 4, 0, 0); \
5199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5200 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5201 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5202 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5203 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5204 \
5205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5206 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5207 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5208 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5209 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5210 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5211 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5212 \
5213 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5214 IEM_MC_END()
5215
5216 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5217 {
5218 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5219 }
5220 else
5221 {
5222 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5223 }
5224 }
5225}
5226
5227
5228/**
5229 * @opcode 0x87
5230 */
5231FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5232{
5233 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5235
5236 /*
5237 * If rm is denoting a register, no more instruction bytes.
5238 */
5239 if (IEM_IS_MODRM_REG_MODE(bRm))
5240 {
5241 switch (pVCpu->iem.s.enmEffOpSize)
5242 {
5243 case IEMMODE_16BIT:
5244 IEM_MC_BEGIN(0, 2, 0, 0);
5245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5246 IEM_MC_LOCAL(uint16_t, uTmp1);
5247 IEM_MC_LOCAL(uint16_t, uTmp2);
5248
5249 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5250 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5251 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5252 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5253
5254 IEM_MC_ADVANCE_RIP_AND_FINISH();
5255 IEM_MC_END();
5256 break;
5257
5258 case IEMMODE_32BIT:
5259 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5261 IEM_MC_LOCAL(uint32_t, uTmp1);
5262 IEM_MC_LOCAL(uint32_t, uTmp2);
5263
5264 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5265 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5266 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5267 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5268
5269 IEM_MC_ADVANCE_RIP_AND_FINISH();
5270 IEM_MC_END();
5271 break;
5272
5273 case IEMMODE_64BIT:
5274 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5276 IEM_MC_LOCAL(uint64_t, uTmp1);
5277 IEM_MC_LOCAL(uint64_t, uTmp2);
5278
5279 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5280 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5281 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5282 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5283
5284 IEM_MC_ADVANCE_RIP_AND_FINISH();
5285 IEM_MC_END();
5286 break;
5287
5288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5289 }
5290 }
5291 else
5292 {
5293 /*
5294 * We're accessing memory.
5295 */
5296#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5297 do { \
5298 switch (pVCpu->iem.s.enmEffOpSize) \
5299 { \
5300 case IEMMODE_16BIT: \
5301 IEM_MC_BEGIN(2, 4, 0, 0); \
5302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5303 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5304 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5305 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5306 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5307 \
5308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5309 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5310 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5311 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5312 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5313 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5314 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5315 \
5316 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5317 IEM_MC_END(); \
5318 break; \
5319 \
5320 case IEMMODE_32BIT: \
5321 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5323 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5324 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5325 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5326 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5327 \
5328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5329 IEMOP_HLP_DONE_DECODING(); \
5330 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5331 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5332 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5333 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5334 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5335 \
5336 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5337 IEM_MC_END(); \
5338 break; \
5339 \
5340 case IEMMODE_64BIT: \
5341 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5343 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5344 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5345 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5346 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5347 \
5348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5349 IEMOP_HLP_DONE_DECODING(); \
5350 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5351 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5352 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5353 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5354 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5355 \
5356 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5357 IEM_MC_END(); \
5358 break; \
5359 \
5360 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5361 } \
5362 } while (0)
5363 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5364 {
5365 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5366 }
5367 else
5368 {
5369 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5370 }
5371 }
5372}
5373
5374
5375/**
5376 * @opcode 0x88
5377 */
5378FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5379{
5380 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5381
5382 uint8_t bRm;
5383 IEM_OPCODE_GET_NEXT_U8(&bRm);
5384
5385 /*
5386 * If rm is denoting a register, no more instruction bytes.
5387 */
5388 if (IEM_IS_MODRM_REG_MODE(bRm))
5389 {
5390 IEM_MC_BEGIN(0, 1, 0, 0);
5391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5392 IEM_MC_LOCAL(uint8_t, u8Value);
5393 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5394 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5395 IEM_MC_ADVANCE_RIP_AND_FINISH();
5396 IEM_MC_END();
5397 }
5398 else
5399 {
5400 /*
5401 * We're writing a register to memory.
5402 */
5403 IEM_MC_BEGIN(0, 2, 0, 0);
5404 IEM_MC_LOCAL(uint8_t, u8Value);
5405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5408 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5409 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5410 IEM_MC_ADVANCE_RIP_AND_FINISH();
5411 IEM_MC_END();
5412 }
5413}
5414
5415
5416/**
5417 * @opcode 0x89
5418 */
5419FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5420{
5421 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5422
5423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5424
5425 /*
5426 * If rm is denoting a register, no more instruction bytes.
5427 */
5428 if (IEM_IS_MODRM_REG_MODE(bRm))
5429 {
5430 switch (pVCpu->iem.s.enmEffOpSize)
5431 {
5432 case IEMMODE_16BIT:
5433 IEM_MC_BEGIN(0, 1, 0, 0);
5434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5435 IEM_MC_LOCAL(uint16_t, u16Value);
5436 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5437 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5438 IEM_MC_ADVANCE_RIP_AND_FINISH();
5439 IEM_MC_END();
5440 break;
5441
5442 case IEMMODE_32BIT:
5443 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5445 IEM_MC_LOCAL(uint32_t, u32Value);
5446 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5447 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5448 IEM_MC_ADVANCE_RIP_AND_FINISH();
5449 IEM_MC_END();
5450 break;
5451
5452 case IEMMODE_64BIT:
5453 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5455 IEM_MC_LOCAL(uint64_t, u64Value);
5456 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5457 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5458 IEM_MC_ADVANCE_RIP_AND_FINISH();
5459 IEM_MC_END();
5460 break;
5461
5462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5463 }
5464 }
5465 else
5466 {
5467 /*
5468 * We're writing a register to memory.
5469 */
5470 switch (pVCpu->iem.s.enmEffOpSize)
5471 {
5472 case IEMMODE_16BIT:
5473 IEM_MC_BEGIN(0, 2, 0, 0);
5474 IEM_MC_LOCAL(uint16_t, u16Value);
5475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5478 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5479 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5480 IEM_MC_ADVANCE_RIP_AND_FINISH();
5481 IEM_MC_END();
5482 break;
5483
5484 case IEMMODE_32BIT:
5485 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5486 IEM_MC_LOCAL(uint32_t, u32Value);
5487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5490 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5491 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5492 IEM_MC_ADVANCE_RIP_AND_FINISH();
5493 IEM_MC_END();
5494 break;
5495
5496 case IEMMODE_64BIT:
5497 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5498 IEM_MC_LOCAL(uint64_t, u64Value);
5499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5502 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5503 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5504 IEM_MC_ADVANCE_RIP_AND_FINISH();
5505 IEM_MC_END();
5506 break;
5507
5508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5509 }
5510 }
5511}
5512
5513
5514/**
5515 * @opcode 0x8a
5516 */
5517FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5518{
5519 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5520
5521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5522
5523 /*
5524 * If rm is denoting a register, no more instruction bytes.
5525 */
5526 if (IEM_IS_MODRM_REG_MODE(bRm))
5527 {
5528 IEM_MC_BEGIN(0, 1, 0, 0);
5529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5530 IEM_MC_LOCAL(uint8_t, u8Value);
5531 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5532 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5533 IEM_MC_ADVANCE_RIP_AND_FINISH();
5534 IEM_MC_END();
5535 }
5536 else
5537 {
5538 /*
5539 * We're loading a register from memory.
5540 */
5541 IEM_MC_BEGIN(0, 2, 0, 0);
5542 IEM_MC_LOCAL(uint8_t, u8Value);
5543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5546 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5547 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5548 IEM_MC_ADVANCE_RIP_AND_FINISH();
5549 IEM_MC_END();
5550 }
5551}
5552
5553
5554/**
5555 * @opcode 0x8b
5556 */
5557FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5558{
5559 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5560
5561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5562
5563 /*
5564 * If rm is denoting a register, no more instruction bytes.
5565 */
5566 if (IEM_IS_MODRM_REG_MODE(bRm))
5567 {
5568 switch (pVCpu->iem.s.enmEffOpSize)
5569 {
5570 case IEMMODE_16BIT:
5571 IEM_MC_BEGIN(0, 1, 0, 0);
5572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5573 IEM_MC_LOCAL(uint16_t, u16Value);
5574 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5575 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5576 IEM_MC_ADVANCE_RIP_AND_FINISH();
5577 IEM_MC_END();
5578 break;
5579
5580 case IEMMODE_32BIT:
5581 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5583 IEM_MC_LOCAL(uint32_t, u32Value);
5584 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5585 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5586 IEM_MC_ADVANCE_RIP_AND_FINISH();
5587 IEM_MC_END();
5588 break;
5589
5590 case IEMMODE_64BIT:
5591 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5593 IEM_MC_LOCAL(uint64_t, u64Value);
5594 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5595 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5596 IEM_MC_ADVANCE_RIP_AND_FINISH();
5597 IEM_MC_END();
5598 break;
5599
5600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5601 }
5602 }
5603 else
5604 {
5605 /*
5606 * We're loading a register from memory.
5607 */
5608 switch (pVCpu->iem.s.enmEffOpSize)
5609 {
5610 case IEMMODE_16BIT:
5611 IEM_MC_BEGIN(0, 2, 0, 0);
5612 IEM_MC_LOCAL(uint16_t, u16Value);
5613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5616 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5617 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5618 IEM_MC_ADVANCE_RIP_AND_FINISH();
5619 IEM_MC_END();
5620 break;
5621
5622 case IEMMODE_32BIT:
5623 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5624 IEM_MC_LOCAL(uint32_t, u32Value);
5625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5628 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5629 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5630 IEM_MC_ADVANCE_RIP_AND_FINISH();
5631 IEM_MC_END();
5632 break;
5633
5634 case IEMMODE_64BIT:
5635 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5636 IEM_MC_LOCAL(uint64_t, u64Value);
5637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5641 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5642 IEM_MC_ADVANCE_RIP_AND_FINISH();
5643 IEM_MC_END();
5644 break;
5645
5646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5647 }
5648 }
5649}
5650
5651
5652/**
5653 * opcode 0x63
5654 * @todo Table fixme
5655 */
5656FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5657{
5658 if (!IEM_IS_64BIT_CODE(pVCpu))
5659 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5660 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5661 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5662 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5663}
5664
5665
5666/**
5667 * @opcode 0x8c
5668 */
5669FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5670{
5671 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5672
5673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5674
5675 /*
5676 * Check that the destination register exists. The REX.R prefix is ignored.
5677 */
5678 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5679 if (iSegReg > X86_SREG_GS)
5680 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5681
5682 /*
5683 * If rm is denoting a register, no more instruction bytes.
5684 * In that case, the operand size is respected and the upper bits are
5685 * cleared (starting with some pentium).
5686 */
5687 if (IEM_IS_MODRM_REG_MODE(bRm))
5688 {
5689 switch (pVCpu->iem.s.enmEffOpSize)
5690 {
5691 case IEMMODE_16BIT:
5692 IEM_MC_BEGIN(0, 1, 0, 0);
5693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5694 IEM_MC_LOCAL(uint16_t, u16Value);
5695 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5696 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5697 IEM_MC_ADVANCE_RIP_AND_FINISH();
5698 IEM_MC_END();
5699 break;
5700
5701 case IEMMODE_32BIT:
5702 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5704 IEM_MC_LOCAL(uint32_t, u32Value);
5705 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5706 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5707 IEM_MC_ADVANCE_RIP_AND_FINISH();
5708 IEM_MC_END();
5709 break;
5710
5711 case IEMMODE_64BIT:
5712 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5714 IEM_MC_LOCAL(uint64_t, u64Value);
5715 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5716 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5717 IEM_MC_ADVANCE_RIP_AND_FINISH();
5718 IEM_MC_END();
5719 break;
5720
5721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5722 }
5723 }
5724 else
5725 {
5726 /*
5727 * We're saving the register to memory. The access is word sized
5728 * regardless of operand size prefixes.
5729 */
5730#if 0 /* not necessary */
5731 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5732#endif
5733 IEM_MC_BEGIN(0, 2, 0, 0);
5734 IEM_MC_LOCAL(uint16_t, u16Value);
5735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5738 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5739 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5740 IEM_MC_ADVANCE_RIP_AND_FINISH();
5741 IEM_MC_END();
5742 }
5743}
5744
5745
5746
5747
5748/**
5749 * @opcode 0x8d
5750 */
5751FNIEMOP_DEF(iemOp_lea_Gv_M)
5752{
5753 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5755 if (IEM_IS_MODRM_REG_MODE(bRm))
5756 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5757
5758 switch (pVCpu->iem.s.enmEffOpSize)
5759 {
5760 case IEMMODE_16BIT:
5761 IEM_MC_BEGIN(0, 2, 0, 0);
5762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5765 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5766 * operand-size, which is usually the case. It'll save an instruction
5767 * and a register. */
5768 IEM_MC_LOCAL(uint16_t, u16Cast);
5769 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5770 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5771 IEM_MC_ADVANCE_RIP_AND_FINISH();
5772 IEM_MC_END();
5773 break;
5774
5775 case IEMMODE_32BIT:
5776 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5780 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5781 * operand-size, which is usually the case. It'll save an instruction
5782 * and a register. */
5783 IEM_MC_LOCAL(uint32_t, u32Cast);
5784 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5785 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5786 IEM_MC_ADVANCE_RIP_AND_FINISH();
5787 IEM_MC_END();
5788 break;
5789
5790 case IEMMODE_64BIT:
5791 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5795 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5796 IEM_MC_ADVANCE_RIP_AND_FINISH();
5797 IEM_MC_END();
5798 break;
5799
5800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5801 }
5802}
5803
5804
5805/**
5806 * @opcode 0x8e
5807 */
5808FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5809{
5810 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5811
5812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5813
5814 /*
5815 * The practical operand size is 16-bit.
5816 */
5817#if 0 /* not necessary */
5818 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5819#endif
5820
5821 /*
5822 * Check that the destination register exists and can be used with this
5823 * instruction. The REX.R prefix is ignored.
5824 */
5825 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5826 /** @todo r=bird: What does 8086 do here wrt CS? */
5827 if ( iSegReg == X86_SREG_CS
5828 || iSegReg > X86_SREG_GS)
5829 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5830
5831 /*
5832 * If rm is denoting a register, no more instruction bytes.
5833 *
5834 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
5835 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
5836 * register. This is a restriction of the current recompiler
5837 * approach.
5838 */
5839 if (IEM_IS_MODRM_REG_MODE(bRm))
5840 {
5841#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
5842 IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
5843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5844 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5845 IEM_MC_ARG(uint16_t, u16Value, 1); \
5846 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5847 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5848 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5849 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5850 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5851 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5852 iemCImpl_load_SReg, iSRegArg, u16Value); \
5853 IEM_MC_END()
5854
5855 if (iSegReg == X86_SREG_SS)
5856 {
5857 if (IEM_IS_32BIT_CODE(pVCpu))
5858 {
5859 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5860 }
5861 else
5862 {
5863 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5864 }
5865 }
5866 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5867 {
5868 IEMOP_MOV_SW_EV_REG_BODY(0);
5869 }
5870 else
5871 {
5872 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
5873 }
5874#undef IEMOP_MOV_SW_EV_REG_BODY
5875 }
5876 else
5877 {
5878 /*
5879 * We're loading the register from memory. The access is word sized
5880 * regardless of operand size prefixes.
5881 */
5882#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
5883 IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
5884 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5885 IEM_MC_ARG(uint16_t, u16Value, 1); \
5886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5889 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5890 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5891 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5892 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5893 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5894 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5895 iemCImpl_load_SReg, iSRegArg, u16Value); \
5896 IEM_MC_END()
5897
5898 if (iSegReg == X86_SREG_SS)
5899 {
5900 if (IEM_IS_32BIT_CODE(pVCpu))
5901 {
5902 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5903 }
5904 else
5905 {
5906 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5907 }
5908 }
5909 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5910 {
5911 IEMOP_MOV_SW_EV_MEM_BODY(0);
5912 }
5913 else
5914 {
5915 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
5916 }
5917#undef IEMOP_MOV_SW_EV_MEM_BODY
5918 }
5919}
5920
5921
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The 'size << 8' argument places the pop size in the rSP-offset
               byte, so the EA is calculated with the incremented rSP - see
               the Intel note above.  (TODO confirm against the macro docs.) */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
    /* Older hand-rolled interpreter-only variant, kept for reference (not compiled). */
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit the new rSP once the store has succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6053
6054
/**
 * @opcode 0x8f
 *
 * Dispatcher for group 1A: modrm.reg == 0 is 'pop Ev', the other reg values
 * form AMD's three-byte XOP prefix.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP may not be combined with legacy size/rep/lock or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X and B are stored inverted in the prefix bytes, hence the '~'. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6117
6118
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Used by opcodes 0x90 (with REX.B) thru 0x97; swaps the given general
 * register with rAX/EAX/AX via two temporaries.  No LOCK prefix is allowed
 * for the register form.
 *
 * @param iReg  The register index (REX.B is OR'ed in below).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6169
6170
6171/**
6172 * @opcode 0x90
6173 */
6174FNIEMOP_DEF(iemOp_nop)
6175{
6176 /* R8/R8D and RAX/EAX can be exchanged. */
6177 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6178 {
6179 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6180 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6181 }
6182
6183 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6184 {
6185 IEMOP_MNEMONIC(pause, "pause");
6186 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6187 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6188 if (!IEM_IS_IN_GUEST(pVCpu))
6189 { /* probable */ }
6190#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6191 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6192 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6193#endif
6194#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6195 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6196 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6197#endif
6198 }
6199 else
6200 IEMOP_MNEMONIC(nop, "nop");
6201 /** @todo testcase: lock nop; lock pause */
6202 IEM_MC_BEGIN(0, 0, 0, 0);
6203 IEMOP_HLP_DONE_DECODING();
6204 IEM_MC_ADVANCE_RIP_AND_FINISH();
6205 IEM_MC_END();
6206}
6207
6208
/**
 * @opcode 0x91
 *
 * xchg rCX,rAX - forwards to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6217
6218
/**
 * @opcode 0x92
 *
 * xchg rDX,rAX - forwards to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6227
6228
/**
 * @opcode 0x93
 *
 * xchg rBX,rAX - forwards to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6237
6238
6239/**
6240 * @opcode 0x94
6241 */
6242FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6243{
6244 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6245 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6246}
6247
6248
/**
 * @opcode 0x95
 *
 * xchg rBP,rAX - forwards to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6257
6258
/**
 * @opcode 0x96
 *
 * xchg rSI,rAX - forwards to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6267
6268
/**
 * @opcode 0x97
 *
 * xchg rDI,rAX - forwards to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6277
6278
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign-extend the lower half of rAX into the next size up,
 * implemented by testing the top bit of the source half and OR'ing in ones
 * or AND'ing them away.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Bit 7 is the sign bit of AL. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Bit 15 is the sign bit of AX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Bit 31 is the sign bit of EAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6328
6329
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo - sign-extend rAX into rDX:rAX by storing all-ones or zero
 * into rDX depending on the sign bit of the source operand.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Replicate the sign bit of AX (bit 15) into all of DX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Replicate the sign bit of EAX (bit 31) into all of EDX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Replicate the sign bit of RAX (bit 63) into all of RDX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6379
6380
/**
 * @opcode 0x9a
 *
 * call Ap - direct far call with an immediate seg:off pointer; invalid in
 * 64-bit mode.  Decodes the immediate and defers to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6402
6403
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions (and CR0.MP/TS conditions) but performs
 * no other work. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6415
6416
/**
 * @opcode 0x9c
 *
 * pushf - deferred entirely to iemCImpl_pushf; rSP is the only GPR modified.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6428
6429
/**
 * @opcode 0x9d
 *
 * popf - deferred to iemCImpl_popf.  Flagged with RFLAGS (it rewrites the
 * flags) and IRQ checks before/after, since popping may change IF.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6442
6443
/**
 * @opcode 0x9e
 *
 * sahf - loads SF/ZF/AF/PF/CF in the low EFLAGS byte from AH; invalid in
 * 64-bit mode unless the CPU reports the LAHF/SAHF feature.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Register index 4 (xSP) addresses AH in the legacy 8-bit encoding. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF may load, force reserved bit 1 set,
       and merge with the untouched upper flag bits. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6467
6468
/**
 * @opcode 0x9f
 *
 * lahf - stores the low EFLAGS byte into AH; invalid in 64-bit mode unless
 * the CPU reports the LAHF/SAHF feature.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Register index 4 (xSP) addresses AH in the legacy 8-bit encoding. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6486
6487
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The moffs width follows the effective address mode (16/32/64 bits) and the
 * fetched value is zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6511
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - load AL from an absolute moffs address in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6536
6537
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - load rAX/EAX/AX from an absolute moffs address in the
 * effective segment; access size follows the operand-size prefix.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6591
6592
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - store AL to an absolute moffs address in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6617
6618
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - store rAX/EAX/AX to an absolute moffs address in the
 * effective segment; access size follows the operand-size prefix.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6672
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits a single (non-rep) movs step: load from DS(/override):rSI, store to
 * ES:rDI, then step both index registers by the value size, down when
 * EFLAGS.DF is set, up otherwise. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6692
/**
 * @opcode 0xa4
 *
 * movsb - byte string move.  With a REP/REPNE prefix (both behave as REP
 * for movs) the whole loop is deferred to a CImpl worker selected by the
 * effective address mode; otherwise a single step is emitted inline.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* The workers clobber rSI, rDI and the rCX counter. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6741
6742
/**
 * @opcode 0xa5
 *
 * movsw/movsd/movsq - word/dword/qword string move.  With a REP/REPNE
 * prefix the loop is deferred to a CImpl worker selected by operand size x
 * address mode; otherwise a single inline step via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Note: every inner case returns via IEM_MC_DEFER_TO_CIMPL_1_RET (or
           the default-case RET), so the missing 'break' after the 32-bit
           inner switch below is unreachable and harmless. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* op64/addr16 cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6866
6867#undef IEM_MOVS_CASE
6868
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits a single (non-rep) cmps step: read from DS(/override):rSI and
 * ES:rDI, compare via the cmp AIMPL worker (updating EFLAGS), then step
 * both index registers by the value size per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6898
/**
 * @opcode 0xa6
 *
 * CMPSB - compare byte at DS(override):[xSI] with byte at ES:[xDI].
 * REPE/REPNE prefixed forms are deferred to the C implementations (which need
 * to be interruptible/restartable); the plain form is emitted inline via
 * IEM_CMPS_CASE, dispatched on the effective address size.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Each case returns; xSI/xDI/xCX are flagged as clobbered for the
               native recompiler. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6975
6976
/**
 * @opcode 0xa7
 *
 * CMPSW/CMPSD/CMPSQ - compare element at DS(override):[xSI] with element at
 * ES:[xDI].  REPE/REPNE forms defer to the C implementations, dispatched on
 * both effective operand size and effective address size; the plain form is
 * emitted inline via IEM_CMPS_CASE (shared with cmpsb above).
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here, but unreachable - every case
                   in the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here, but unreachable - every case
                   in the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7174
7175#undef IEM_CMPS_CASE
7176
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - AND AL with an immediate byte, updating EFLAGS only (no
 * destination write).  The whole body is generated by the shared
 * IEMOP_BODY_BINARY_AL_Ib template around the 'test' assembly worker.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7186
7187
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - AND AX/EAX/RAX with an immediate (sign-extended for 64-bit
 * operand size), updating EFLAGS only.  Dispatch over the three operand
 * sizes is handled by the shared IEMOP_BODY_BINARY_rAX_Iz template.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7197
7198
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 *  non-repeating STOS forms: stores AL/AX/EAX/RAX to ES:[xDI] and then steps
 *  xDI forward or backward by the operand size according to EFLAGS.DF.
 *  @param ValBits    Operand width in bits (8/16/32/64).
 *  @param AddrBits   Effective address width in bits (16/32/64).
 *  @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    /* Destination is always ES:[xDI]; no segment override applies. */ \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7215
/**
 * @opcode 0xaa
 *
 * STOSB - store AL to ES:[xDI].  Both REP and REPNE (which behaves like REP
 * for STOS) defer to the C implementation; the plain form is emitted inline
 * via IEM_STOS_CASE, dispatched on the effective address size.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Each case returns; xDI/xCX are flagged as clobbered for the
               native recompiler. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7261
7262
7263/**
7264 * @opcode 0xab
7265 */
7266FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7267{
7268 /*
7269 * Use the C implementation if a repeat prefix is encountered.
7270 */
7271 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7272 {
7273 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7275 switch (pVCpu->iem.s.enmEffOpSize)
7276 {
7277 case IEMMODE_16BIT:
7278 switch (pVCpu->iem.s.enmEffAddrMode)
7279 {
7280 case IEMMODE_16BIT:
7281 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7282 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7283 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7284 iemCImpl_stos_ax_m16);
7285 case IEMMODE_32BIT:
7286 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7287 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7288 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7289 iemCImpl_stos_ax_m32);
7290 case IEMMODE_64BIT:
7291 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7292 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7293 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7294 iemCImpl_stos_ax_m64);
7295 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7296 }
7297 break;
7298 case IEMMODE_32BIT:
7299 switch (pVCpu->iem.s.enmEffAddrMode)
7300 {
7301 case IEMMODE_16BIT:
7302 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7303 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7304 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7305 iemCImpl_stos_eax_m16);
7306 case IEMMODE_32BIT:
7307 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7308 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7309 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7310 iemCImpl_stos_eax_m32);
7311 case IEMMODE_64BIT:
7312 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7313 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7314 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7315 iemCImpl_stos_eax_m64);
7316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7317 }
7318 case IEMMODE_64BIT:
7319 switch (pVCpu->iem.s.enmEffAddrMode)
7320 {
7321 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7322 case IEMMODE_32BIT:
7323 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7324 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7325 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7326 iemCImpl_stos_rax_m32);
7327 case IEMMODE_64BIT:
7328 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7329 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7330 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7331 iemCImpl_stos_rax_m64);
7332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7333 }
7334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7335 }
7336 }
7337
7338 /*
7339 * Annoying double switch here.
7340 * Using ugly macro for implementing the cases, sharing it with stosb.
7341 */
7342 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7343 switch (pVCpu->iem.s.enmEffOpSize)
7344 {
7345 case IEMMODE_16BIT:
7346 switch (pVCpu->iem.s.enmEffAddrMode)
7347 {
7348 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7349 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7350 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7352 }
7353 break;
7354
7355 case IEMMODE_32BIT:
7356 switch (pVCpu->iem.s.enmEffAddrMode)
7357 {
7358 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7359 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7360 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7362 }
7363 break;
7364
7365 case IEMMODE_64BIT:
7366 switch (pVCpu->iem.s.enmEffAddrMode)
7367 {
7368 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7369 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7370 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7372 }
7373 break;
7374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7375 }
7376}
7377
7378#undef IEM_STOS_CASE
7379
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 *  non-repeating LODS forms: loads AL/AX/EAX/RAX from [xSI] in the effective
 *  segment and steps xSI forward or backward by the operand size according
 *  to EFLAGS.DF.
 *  @param ValBits    Operand width in bits (8/16/32/64).
 *  @param AddrBits   Effective address width in bits (16/32/64).
 *  @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN. */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    /* Source segment honours overrides, hence iEffSeg (defaults to DS). */ \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7396
/**
 * @opcode 0xac
 *
 * LODSB - load AL from DS(override):[xSI].  REP/REPNE forms defer to the C
 * implementation; the plain form is emitted inline via IEM_LODS_CASE,
 * dispatched on the effective address size.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* Each case returns; xAX/xSI/xCX are flagged as clobbered for the
               native recompiler. */
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7445
7446
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from DS(override):[xSI].  REP/REPNE
 * forms defer to the C implementation, dispatched on both effective operand
 * size and effective address size; the plain form is emitted inline via
 * IEM_LODS_CASE (shared with lodsb above).
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* Each case returns; xAX/xSI/xCX are flagged as clobbered
                       for the native recompiler. */
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here, but unreachable - every case
                   in the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7569
7570#undef IEM_LODS_CASE
7571
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 *  non-repeating SCAS forms: compares AL/AX/EAX/RAX against the element at
 *  ES:[xDI] via the 'cmp' assembly worker (EFLAGS only, rAX unmodified) and
 *  steps xDI forward or backward by the operand size according to EFLAGS.DF.
 *  @param ValBits    Operand width in bits (8/16/32/64).
 *  @param AddrBits   Effective address width in bits (16/32/64).
 *  @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    /* Memory operand is always ES:[xDI]; no segment override applies. */ \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7594
7595/**
7596 * @opcode 0xae
7597 */
7598FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7599{
7600 /*
7601 * Use the C implementation if a repeat prefix is encountered.
7602 */
7603 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7604 {
7605 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7607 switch (pVCpu->iem.s.enmEffAddrMode)
7608 {
7609 case IEMMODE_16BIT:
7610 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7611 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7612 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7613 iemCImpl_repe_scas_al_m16);
7614 case IEMMODE_32BIT:
7615 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7616 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7617 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7618 iemCImpl_repe_scas_al_m32);
7619 case IEMMODE_64BIT:
7620 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7621 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7622 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7623 iemCImpl_repe_scas_al_m64);
7624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7625 }
7626 }
7627 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7628 {
7629 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7631 switch (pVCpu->iem.s.enmEffAddrMode)
7632 {
7633 case IEMMODE_16BIT:
7634 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7635 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7636 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7637 iemCImpl_repne_scas_al_m16);
7638 case IEMMODE_32BIT:
7639 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7640 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7641 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7642 iemCImpl_repne_scas_al_m32);
7643 case IEMMODE_64BIT:
7644 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7645 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7646 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7647 iemCImpl_repne_scas_al_m64);
7648 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7649 }
7650 }
7651
7652 /*
7653 * Sharing case implementation with stos[wdq] below.
7654 */
7655 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7656 switch (pVCpu->iem.s.enmEffAddrMode)
7657 {
7658 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7659 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7660 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7662 }
7663}
7664
7665
7666/**
7667 * @opcode 0xaf
7668 */
7669FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7670{
7671 /*
7672 * Use the C implementation if a repeat prefix is encountered.
7673 */
7674 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7675 {
7676 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7678 switch (pVCpu->iem.s.enmEffOpSize)
7679 {
7680 case IEMMODE_16BIT:
7681 switch (pVCpu->iem.s.enmEffAddrMode)
7682 {
7683 case IEMMODE_16BIT:
7684 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7685 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7686 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7687 iemCImpl_repe_scas_ax_m16);
7688 case IEMMODE_32BIT:
7689 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7690 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7691 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7692 iemCImpl_repe_scas_ax_m32);
7693 case IEMMODE_64BIT:
7694 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7695 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7696 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7697 iemCImpl_repe_scas_ax_m64);
7698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7699 }
7700 break;
7701 case IEMMODE_32BIT:
7702 switch (pVCpu->iem.s.enmEffAddrMode)
7703 {
7704 case IEMMODE_16BIT:
7705 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7706 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7707 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7708 iemCImpl_repe_scas_eax_m16);
7709 case IEMMODE_32BIT:
7710 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7711 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7712 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7713 iemCImpl_repe_scas_eax_m32);
7714 case IEMMODE_64BIT:
7715 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7716 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7717 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7718 iemCImpl_repe_scas_eax_m64);
7719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7720 }
7721 case IEMMODE_64BIT:
7722 switch (pVCpu->iem.s.enmEffAddrMode)
7723 {
7724 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7725 case IEMMODE_32BIT:
7726 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7727 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7728 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7729 iemCImpl_repe_scas_rax_m32);
7730 case IEMMODE_64BIT:
7731 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7732 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7733 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7734 iemCImpl_repe_scas_rax_m64);
7735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7736 }
7737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7738 }
7739 }
7740 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7741 {
7742 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7744 switch (pVCpu->iem.s.enmEffOpSize)
7745 {
7746 case IEMMODE_16BIT:
7747 switch (pVCpu->iem.s.enmEffAddrMode)
7748 {
7749 case IEMMODE_16BIT:
7750 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7751 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7752 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7753 iemCImpl_repne_scas_ax_m16);
7754 case IEMMODE_32BIT:
7755 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7756 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7758 iemCImpl_repne_scas_ax_m32);
7759 case IEMMODE_64BIT:
7760 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7761 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7762 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7763 iemCImpl_repne_scas_ax_m64);
7764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7765 }
7766 break;
7767 case IEMMODE_32BIT:
7768 switch (pVCpu->iem.s.enmEffAddrMode)
7769 {
7770 case IEMMODE_16BIT:
7771 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7772 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7773 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7774 iemCImpl_repne_scas_eax_m16);
7775 case IEMMODE_32BIT:
7776 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7777 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7778 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7779 iemCImpl_repne_scas_eax_m32);
7780 case IEMMODE_64BIT:
7781 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7782 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7783 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7784 iemCImpl_repne_scas_eax_m64);
7785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7786 }
7787 case IEMMODE_64BIT:
7788 switch (pVCpu->iem.s.enmEffAddrMode)
7789 {
7790 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7791 case IEMMODE_32BIT:
7792 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7793 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7794 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7795 iemCImpl_repne_scas_rax_m32);
7796 case IEMMODE_64BIT:
7797 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7798 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7799 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7800 iemCImpl_repne_scas_rax_m64);
7801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7802 }
7803 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7804 }
7805 }
7806
7807 /*
7808 * Annoying double switch here.
7809 * Using ugly macro for implementing the cases, sharing it with scasb.
7810 */
7811 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7812 switch (pVCpu->iem.s.enmEffOpSize)
7813 {
7814 case IEMMODE_16BIT:
7815 switch (pVCpu->iem.s.enmEffAddrMode)
7816 {
7817 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7818 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7819 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7820 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7821 }
7822 break;
7823
7824 case IEMMODE_32BIT:
7825 switch (pVCpu->iem.s.enmEffAddrMode)
7826 {
7827 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7828 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7829 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7831 }
7832 break;
7833
7834 case IEMMODE_64BIT:
7835 switch (pVCpu->iem.s.enmEffAddrMode)
7836 {
7837 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7838 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7839 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7841 }
7842 break;
7843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7844 }
7845}
7846
7847#undef IEM_SCAS_CASE
7848
7849/**
7850 * Common 'mov r8, imm8' helper.
7851 */
7852FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7853{
7854 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7855 IEM_MC_BEGIN(0, 0, 0, 0);
7856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7857 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7858 IEM_MC_ADVANCE_RIP_AND_FINISH();
7859 IEM_MC_END();
7860}
7861
7862
7863/**
7864 * @opcode 0xb0
7865 */
7866FNIEMOP_DEF(iemOp_mov_AL_Ib)
7867{
7868 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7869 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7870}
7871
7872
7873/**
7874 * @opcode 0xb1
7875 */
7876FNIEMOP_DEF(iemOp_CL_Ib)
7877{
7878 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7879 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7880}
7881
7882
7883/**
7884 * @opcode 0xb2
7885 */
7886FNIEMOP_DEF(iemOp_DL_Ib)
7887{
7888 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7889 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7890}
7891
7892
7893/**
7894 * @opcode 0xb3
7895 */
7896FNIEMOP_DEF(iemOp_BL_Ib)
7897{
7898 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7899 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7900}
7901
7902
7903/**
7904 * @opcode 0xb4
7905 */
7906FNIEMOP_DEF(iemOp_mov_AH_Ib)
7907{
7908 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7909 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7910}
7911
7912
7913/**
7914 * @opcode 0xb5
7915 */
7916FNIEMOP_DEF(iemOp_CH_Ib)
7917{
7918 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7919 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7920}
7921
7922
7923/**
7924 * @opcode 0xb6
7925 */
7926FNIEMOP_DEF(iemOp_DH_Ib)
7927{
7928 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7929 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7930}
7931
7932
7933/**
7934 * @opcode 0xb7
7935 */
7936FNIEMOP_DEF(iemOp_BH_Ib)
7937{
7938 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7939 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7940}
7941
7942
7943/**
7944 * Common 'mov regX,immX' helper.
7945 */
7946FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7947{
7948 switch (pVCpu->iem.s.enmEffOpSize)
7949 {
7950 case IEMMODE_16BIT:
7951 IEM_MC_BEGIN(0, 0, 0, 0);
7952 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7955 IEM_MC_ADVANCE_RIP_AND_FINISH();
7956 IEM_MC_END();
7957 break;
7958
7959 case IEMMODE_32BIT:
7960 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7961 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7963 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
7964 IEM_MC_ADVANCE_RIP_AND_FINISH();
7965 IEM_MC_END();
7966 break;
7967
7968 case IEMMODE_64BIT:
7969 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7970 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7972 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
7973 IEM_MC_ADVANCE_RIP_AND_FINISH();
7974 IEM_MC_END();
7975 break;
7976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7977 }
7978}
7979
7980
7981/**
7982 * @opcode 0xb8
7983 */
7984FNIEMOP_DEF(iemOp_eAX_Iv)
7985{
7986 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7987 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7988}
7989
7990
7991/**
7992 * @opcode 0xb9
7993 */
7994FNIEMOP_DEF(iemOp_eCX_Iv)
7995{
7996 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7997 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7998}
7999
8000
8001/**
8002 * @opcode 0xba
8003 */
8004FNIEMOP_DEF(iemOp_eDX_Iv)
8005{
8006 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8007 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8008}
8009
8010
8011/**
8012 * @opcode 0xbb
8013 */
8014FNIEMOP_DEF(iemOp_eBX_Iv)
8015{
8016 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8017 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8018}
8019
8020
8021/**
8022 * @opcode 0xbc
8023 */
8024FNIEMOP_DEF(iemOp_eSP_Iv)
8025{
8026 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8027 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8028}
8029
8030
8031/**
8032 * @opcode 0xbd
8033 */
8034FNIEMOP_DEF(iemOp_eBP_Iv)
8035{
8036 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8037 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8038}
8039
8040
8041/**
8042 * @opcode 0xbe
8043 */
8044FNIEMOP_DEF(iemOp_eSI_Iv)
8045{
8046 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8047 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8048}
8049
8050
8051/**
8052 * @opcode 0xbf
8053 */
8054FNIEMOP_DEF(iemOp_eDI_Iv)
8055{
8056 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8057 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8058}
8059
8060
8061/**
8062 * @opcode 0xc0
8063 */
8064FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8065{
8066 IEMOP_HLP_MIN_186();
8067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8068 PCIEMOPSHIFTSIZES pImpl;
8069 switch (IEM_GET_MODRM_REG_8(bRm))
8070 {
8071 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
8072 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
8073 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
8074 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
8075 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
8076 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
8077 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
8078 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8079 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8080 }
8081 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8082
8083 if (IEM_IS_MODRM_REG_MODE(bRm))
8084 {
8085 /* register */
8086 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8087 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8089 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8090 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8091 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8092 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8093 IEM_MC_REF_EFLAGS(pEFlags);
8094 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8095 IEM_MC_ADVANCE_RIP_AND_FINISH();
8096 IEM_MC_END();
8097 }
8098 else
8099 {
8100 /* memory */
8101 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
8102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8104
8105 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8107
8108 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8109 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8110 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8111
8112 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8113 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8114 IEM_MC_FETCH_EFLAGS(EFlags);
8115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8116
8117 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8118 IEM_MC_COMMIT_EFLAGS(EFlags);
8119 IEM_MC_ADVANCE_RIP_AND_FINISH();
8120 IEM_MC_END();
8121 }
8122}
8123
8124
8125/**
8126 * @opcode 0xc1
8127 */
8128FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8129{
8130 IEMOP_HLP_MIN_186();
8131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8132 PCIEMOPSHIFTSIZES pImpl;
8133 switch (IEM_GET_MODRM_REG_8(bRm))
8134 {
8135 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
8136 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
8137 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
8138 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
8139 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
8140 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
8141 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
8142 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8143 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8144 }
8145 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8146
8147 if (IEM_IS_MODRM_REG_MODE(bRm))
8148 {
8149 /* register */
8150 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8151 switch (pVCpu->iem.s.enmEffOpSize)
8152 {
8153 case IEMMODE_16BIT:
8154 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8156 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8157 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8159 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8160 IEM_MC_REF_EFLAGS(pEFlags);
8161 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8162 IEM_MC_ADVANCE_RIP_AND_FINISH();
8163 IEM_MC_END();
8164 break;
8165
8166 case IEMMODE_32BIT:
8167 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8169 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8170 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8171 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8172 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8173 IEM_MC_REF_EFLAGS(pEFlags);
8174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8175 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8176 IEM_MC_ADVANCE_RIP_AND_FINISH();
8177 IEM_MC_END();
8178 break;
8179
8180 case IEMMODE_64BIT:
8181 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8183 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8184 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8185 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8186 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8187 IEM_MC_REF_EFLAGS(pEFlags);
8188 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8189 IEM_MC_ADVANCE_RIP_AND_FINISH();
8190 IEM_MC_END();
8191 break;
8192
8193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8194 }
8195 }
8196 else
8197 {
8198 /* memory */
8199 switch (pVCpu->iem.s.enmEffOpSize)
8200 {
8201 case IEMMODE_16BIT:
8202 IEM_MC_BEGIN(3, 3, 0, 0);
8203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8205
8206 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8208
8209 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8210 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8211 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8212
8213 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8214 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8215 IEM_MC_FETCH_EFLAGS(EFlags);
8216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8217
8218 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8219 IEM_MC_COMMIT_EFLAGS(EFlags);
8220 IEM_MC_ADVANCE_RIP_AND_FINISH();
8221 IEM_MC_END();
8222 break;
8223
8224 case IEMMODE_32BIT:
8225 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8228
8229 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8231
8232 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8233 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8234 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8235
8236 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8237 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8238 IEM_MC_FETCH_EFLAGS(EFlags);
8239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8240
8241 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8242 IEM_MC_COMMIT_EFLAGS(EFlags);
8243 IEM_MC_ADVANCE_RIP_AND_FINISH();
8244 IEM_MC_END();
8245 break;
8246
8247 case IEMMODE_64BIT:
8248 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8251
8252 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8254
8255 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8256 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8257 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8258
8259 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8260 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8261 IEM_MC_FETCH_EFLAGS(EFlags);
8262 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8263
8264 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8265 IEM_MC_COMMIT_EFLAGS(EFlags);
8266 IEM_MC_ADVANCE_RIP_AND_FINISH();
8267 IEM_MC_END();
8268 break;
8269
8270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8271 }
8272 }
8273}
8274
8275
8276/**
8277 * @opcode 0xc2
8278 */
8279FNIEMOP_DEF(iemOp_retn_Iw)
8280{
8281 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8282 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8283 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8285 switch (pVCpu->iem.s.enmEffOpSize)
8286 {
8287 case IEMMODE_16BIT:
8288 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8289 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
8290 case IEMMODE_32BIT:
8291 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8292 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
8293 case IEMMODE_64BIT:
8294 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8295 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
8296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8297 }
8298}
8299
8300
8301/**
8302 * @opcode 0xc3
8303 */
8304FNIEMOP_DEF(iemOp_retn)
8305{
8306 IEMOP_MNEMONIC(retn, "retn");
8307 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8309 switch (pVCpu->iem.s.enmEffOpSize)
8310 {
8311 case IEMMODE_16BIT:
8312 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8313 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
8314 case IEMMODE_32BIT:
8315 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8316 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
8317 case IEMMODE_64BIT:
8318 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8319 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
8320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8321 }
8322}
8323
8324
8325/**
8326 * @opcode 0xc4
8327 */
8328FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8329{
8330 /* The LDS instruction is invalid 64-bit mode. In legacy and
8331 compatability mode it is invalid with MOD=3.
8332 The use as a VEX prefix is made possible by assigning the inverted
8333 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8334 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8336 if ( IEM_IS_64BIT_CODE(pVCpu)
8337 || IEM_IS_MODRM_REG_MODE(bRm) )
8338 {
8339 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8340 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8341 {
8342 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8343 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8344 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8345 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8346 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8347 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8348 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8349 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8350 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8351 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8352 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8353 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8354 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8355
8356 switch (bRm & 0x1f)
8357 {
8358 case 1: /* 0x0f lead opcode byte. */
8359#ifdef IEM_WITH_VEX
8360 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8361#else
8362 IEMOP_BITCH_ABOUT_STUB();
8363 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8364#endif
8365
8366 case 2: /* 0x0f 0x38 lead opcode bytes. */
8367#ifdef IEM_WITH_VEX
8368 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8369#else
8370 IEMOP_BITCH_ABOUT_STUB();
8371 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8372#endif
8373
8374 case 3: /* 0x0f 0x3a lead opcode bytes. */
8375#ifdef IEM_WITH_VEX
8376 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8377#else
8378 IEMOP_BITCH_ABOUT_STUB();
8379 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8380#endif
8381
8382 default:
8383 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8384 IEMOP_RAISE_INVALID_OPCODE_RET();
8385 }
8386 }
8387 Log(("VEX3: VEX support disabled!\n"));
8388 IEMOP_RAISE_INVALID_OPCODE_RET();
8389 }
8390
8391 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8392 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8393}
8394
8395
8396/**
8397 * @opcode 0xc5
8398 */
8399FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8400{
8401 /* The LES instruction is invalid 64-bit mode. In legacy and
8402 compatability mode it is invalid with MOD=3.
8403 The use as a VEX prefix is made possible by assigning the inverted
8404 REX.R to the top MOD bit, and the top bit in the inverted register
8405 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8406 to accessing registers 0..7 in this VEX form. */
8407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8408 if ( IEM_IS_64BIT_CODE(pVCpu)
8409 || IEM_IS_MODRM_REG_MODE(bRm))
8410 {
8411 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8413 {
8414 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8415 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8416 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8417 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8418 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8419 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8420 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8421 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8422
8423#ifdef IEM_WITH_VEX
8424 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8425#else
8426 IEMOP_BITCH_ABOUT_STUB();
8427 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8428#endif
8429 }
8430
8431 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8432 Log(("VEX2: VEX support disabled!\n"));
8433 IEMOP_RAISE_INVALID_OPCODE_RET();
8434 }
8435
8436 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8437 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8438}
8439
8440
8441/**
8442 * @opcode 0xc6
8443 */
8444FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8445{
8446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8447 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8448 IEMOP_RAISE_INVALID_OPCODE_RET();
8449 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8450
8451 if (IEM_IS_MODRM_REG_MODE(bRm))
8452 {
8453 /* register access */
8454 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8455 IEM_MC_BEGIN(0, 0, 0, 0);
8456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8457 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8458 IEM_MC_ADVANCE_RIP_AND_FINISH();
8459 IEM_MC_END();
8460 }
8461 else
8462 {
8463 /* memory access. */
8464 IEM_MC_BEGIN(0, 1, 0, 0);
8465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8467 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8469 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8470 IEM_MC_ADVANCE_RIP_AND_FINISH();
8471 IEM_MC_END();
8472 }
8473}
8474
8475
8476/**
8477 * @opcode 0xc7
8478 */
8479FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8480{
8481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8482 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8483 IEMOP_RAISE_INVALID_OPCODE_RET();
8484 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8485
8486 if (IEM_IS_MODRM_REG_MODE(bRm))
8487 {
8488 /* register access */
8489 switch (pVCpu->iem.s.enmEffOpSize)
8490 {
8491 case IEMMODE_16BIT:
8492 IEM_MC_BEGIN(0, 0, 0, 0);
8493 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8495 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8496 IEM_MC_ADVANCE_RIP_AND_FINISH();
8497 IEM_MC_END();
8498 break;
8499
8500 case IEMMODE_32BIT:
8501 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8502 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8504 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8505 IEM_MC_ADVANCE_RIP_AND_FINISH();
8506 IEM_MC_END();
8507 break;
8508
8509 case IEMMODE_64BIT:
8510 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8511 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8513 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8514 IEM_MC_ADVANCE_RIP_AND_FINISH();
8515 IEM_MC_END();
8516 break;
8517
8518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8519 }
8520 }
8521 else
8522 {
8523 /* memory access. */
8524 switch (pVCpu->iem.s.enmEffOpSize)
8525 {
8526 case IEMMODE_16BIT:
8527 IEM_MC_BEGIN(0, 1, 0, 0);
8528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8530 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8532 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8533 IEM_MC_ADVANCE_RIP_AND_FINISH();
8534 IEM_MC_END();
8535 break;
8536
8537 case IEMMODE_32BIT:
8538 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8541 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8543 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8544 IEM_MC_ADVANCE_RIP_AND_FINISH();
8545 IEM_MC_END();
8546 break;
8547
8548 case IEMMODE_64BIT:
8549 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8552 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8554 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8555 IEM_MC_ADVANCE_RIP_AND_FINISH();
8556 IEM_MC_END();
8557 break;
8558
8559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8560 }
8561 }
8562}
8563
8564
8565
8566
8567/**
8568 * @opcode 0xc8
8569 */
8570FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8571{
8572 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8573 IEMOP_HLP_MIN_186();
8574 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8575 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8576 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8578 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8579 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8580 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8581 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8582}
8583
8584
8585/**
8586 * @opcode 0xc9
8587 */
8588FNIEMOP_DEF(iemOp_leave)
8589{
8590 IEMOP_MNEMONIC(leave, "leave");
8591 IEMOP_HLP_MIN_186();
8592 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8594 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8595 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8596 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8597 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8598}
8599
8600
8601/**
8602 * @opcode 0xca
8603 */
8604FNIEMOP_DEF(iemOp_retf_Iw)
8605{
8606 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8607 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8609 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8610 | IEM_CIMPL_F_MODE,
8611 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8612 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8613 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8614 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8615 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8616 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8617 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8618 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8619 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8620 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8621 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8622 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8623 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8624 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8625 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8626 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8627 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8628 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8629}
8630
8631
8632/**
8633 * @opcode 0xcb
8634 */
8635FNIEMOP_DEF(iemOp_retf)
8636{
8637 IEMOP_MNEMONIC(retf, "retf");
8638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8639 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8640 | IEM_CIMPL_F_MODE,
8641 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8642 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8643 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8644 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8645 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8646 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8647 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8648 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8649 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8650 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8651 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8652 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8653 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8654 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8655 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8656 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8657 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8658 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8659}
8660
8661
8662/**
8663 * @opcode 0xcc
8664 */
8665FNIEMOP_DEF(iemOp_int3)
8666{
8667 IEMOP_MNEMONIC(int3, "int3");
8668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8669 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8670 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8671 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8672}
8673
8674
8675/**
8676 * @opcode 0xcd
8677 */
8678FNIEMOP_DEF(iemOp_int_Ib)
8679{
8680 IEMOP_MNEMONIC(int_Ib, "int Ib");
8681 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8683 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8684 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8685 iemCImpl_int, u8Int, IEMINT_INTN);
8686 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8687}
8688
8689
8690/**
8691 * @opcode 0xce
8692 */
8693FNIEMOP_DEF(iemOp_into)
8694{
8695 IEMOP_MNEMONIC(into, "into");
8696 IEMOP_HLP_NO_64BIT();
8697 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8698 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8699 UINT64_MAX,
8700 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8701 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8702}
8703
8704
8705/**
8706 * @opcode 0xcf
8707 */
8708FNIEMOP_DEF(iemOp_iret)
8709{
8710 IEMOP_MNEMONIC(iret, "iret");
8711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8712 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8713 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8714 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8715 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8716 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8717 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8718 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8719 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8720 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8721 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8722 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8723 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8724 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8725 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8726 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8727 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8728 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8729 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8730 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8731 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8732 /* Segment registers are sanitized when returning to an outer ring, or fully
8733 reloaded when returning to v86 mode. Thus the large flush list above. */
8734}
8735
8736
8737/**
8738 * @opcode 0xd0
8739 */
8740FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8741{
8742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8743 PCIEMOPSHIFTSIZES pImpl;
8744 switch (IEM_GET_MODRM_REG_8(bRm))
8745 {
8746 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8747 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8748 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8749 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8750 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8751 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8752 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8753 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8754 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8755 }
8756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8757
8758 if (IEM_IS_MODRM_REG_MODE(bRm))
8759 {
8760 /* register */
8761 IEM_MC_BEGIN(3, 0, 0, 0);
8762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8763 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8764 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8765 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8766 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8767 IEM_MC_REF_EFLAGS(pEFlags);
8768 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8769 IEM_MC_ADVANCE_RIP_AND_FINISH();
8770 IEM_MC_END();
8771 }
8772 else
8773 {
8774 /* memory */
8775 IEM_MC_BEGIN(3, 3, 0, 0);
8776 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8777 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8778 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8780 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8781
8782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8784 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8785 IEM_MC_FETCH_EFLAGS(EFlags);
8786 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8787
8788 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8789 IEM_MC_COMMIT_EFLAGS(EFlags);
8790 IEM_MC_ADVANCE_RIP_AND_FINISH();
8791 IEM_MC_END();
8792 }
8793}
8794
8795
8796
8797/**
8798 * @opcode 0xd1
8799 */
8800FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8801{
8802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8803 PCIEMOPSHIFTSIZES pImpl;
8804 switch (IEM_GET_MODRM_REG_8(bRm))
8805 {
8806 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8807 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8808 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8809 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8810 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8811 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8812 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8813 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8814 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8815 }
8816 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8817
8818 if (IEM_IS_MODRM_REG_MODE(bRm))
8819 {
8820 /* register */
8821 switch (pVCpu->iem.s.enmEffOpSize)
8822 {
8823 case IEMMODE_16BIT:
8824 IEM_MC_BEGIN(3, 0, 0, 0);
8825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8826 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8827 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8828 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8829 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8830 IEM_MC_REF_EFLAGS(pEFlags);
8831 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8832 IEM_MC_ADVANCE_RIP_AND_FINISH();
8833 IEM_MC_END();
8834 break;
8835
8836 case IEMMODE_32BIT:
8837 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8840 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8841 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8842 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8843 IEM_MC_REF_EFLAGS(pEFlags);
8844 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8845 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8846 IEM_MC_ADVANCE_RIP_AND_FINISH();
8847 IEM_MC_END();
8848 break;
8849
8850 case IEMMODE_64BIT:
8851 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8853 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8854 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8856 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8857 IEM_MC_REF_EFLAGS(pEFlags);
8858 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8859 IEM_MC_ADVANCE_RIP_AND_FINISH();
8860 IEM_MC_END();
8861 break;
8862
8863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8864 }
8865 }
8866 else
8867 {
8868 /* memory */
8869 switch (pVCpu->iem.s.enmEffOpSize)
8870 {
8871 case IEMMODE_16BIT:
8872 IEM_MC_BEGIN(3, 3, 0, 0);
8873 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8874 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8875 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8877 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8878
8879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8881 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8882 IEM_MC_FETCH_EFLAGS(EFlags);
8883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8884
8885 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8886 IEM_MC_COMMIT_EFLAGS(EFlags);
8887 IEM_MC_ADVANCE_RIP_AND_FINISH();
8888 IEM_MC_END();
8889 break;
8890
8891 case IEMMODE_32BIT:
8892 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8893 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8894 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8895 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8897 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8898
8899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8901 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8902 IEM_MC_FETCH_EFLAGS(EFlags);
8903 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8904
8905 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8906 IEM_MC_COMMIT_EFLAGS(EFlags);
8907 IEM_MC_ADVANCE_RIP_AND_FINISH();
8908 IEM_MC_END();
8909 break;
8910
8911 case IEMMODE_64BIT:
8912 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8913 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8914 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8915 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8917 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8918
8919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8921 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8922 IEM_MC_FETCH_EFLAGS(EFlags);
8923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8924
8925 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8926 IEM_MC_COMMIT_EFLAGS(EFlags);
8927 IEM_MC_ADVANCE_RIP_AND_FINISH();
8928 IEM_MC_END();
8929 break;
8930
8931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8932 }
8933 }
8934}
8935
8936
8937/**
8938 * @opcode 0xd2
8939 */
8940FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8941{
8942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8943 PCIEMOPSHIFTSIZES pImpl;
8944 switch (IEM_GET_MODRM_REG_8(bRm))
8945 {
8946 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8947 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8948 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8949 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8950 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8951 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8952 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8953 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8954 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8955 }
8956 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8957
8958 if (IEM_IS_MODRM_REG_MODE(bRm))
8959 {
8960 /* register */
8961 IEM_MC_BEGIN(3, 0, 0, 0);
8962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8963 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8964 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8965 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8966 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8967 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8968 IEM_MC_REF_EFLAGS(pEFlags);
8969 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8970 IEM_MC_ADVANCE_RIP_AND_FINISH();
8971 IEM_MC_END();
8972 }
8973 else
8974 {
8975 /* memory */
8976 IEM_MC_BEGIN(3, 3, 0, 0);
8977 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8978 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8979 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8981 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8982
8983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8985 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8986 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8987 IEM_MC_FETCH_EFLAGS(EFlags);
8988 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8989
8990 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8991 IEM_MC_COMMIT_EFLAGS(EFlags);
8992 IEM_MC_ADVANCE_RIP_AND_FINISH();
8993 IEM_MC_END();
8994 }
8995}
8996
8997
8998/**
8999 * @opcode 0xd3
9000 */
9001FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9002{
9003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9004 PCIEMOPSHIFTSIZES pImpl;
9005 switch (IEM_GET_MODRM_REG_8(bRm))
9006 {
9007 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
9008 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
9009 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
9010 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
9011 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
9012 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
9013 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
9014 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9015 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9016 }
9017 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9018
9019 if (IEM_IS_MODRM_REG_MODE(bRm))
9020 {
9021 /* register */
9022 switch (pVCpu->iem.s.enmEffOpSize)
9023 {
9024 case IEMMODE_16BIT:
9025 IEM_MC_BEGIN(3, 0, 0, 0);
9026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9027 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9028 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9029 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9030 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9031 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9032 IEM_MC_REF_EFLAGS(pEFlags);
9033 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9034 IEM_MC_ADVANCE_RIP_AND_FINISH();
9035 IEM_MC_END();
9036 break;
9037
9038 case IEMMODE_32BIT:
9039 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
9040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9041 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9042 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9043 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9044 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9045 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9046 IEM_MC_REF_EFLAGS(pEFlags);
9047 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9048 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9049 IEM_MC_ADVANCE_RIP_AND_FINISH();
9050 IEM_MC_END();
9051 break;
9052
9053 case IEMMODE_64BIT:
9054 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
9055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9056 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9057 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9058 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9059 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9060 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9061 IEM_MC_REF_EFLAGS(pEFlags);
9062 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9063 IEM_MC_ADVANCE_RIP_AND_FINISH();
9064 IEM_MC_END();
9065 break;
9066
9067 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9068 }
9069 }
9070 else
9071 {
9072 /* memory */
9073 switch (pVCpu->iem.s.enmEffOpSize)
9074 {
9075 case IEMMODE_16BIT:
9076 IEM_MC_BEGIN(3, 3, 0, 0);
9077 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9078 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9079 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9081 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9082
9083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9085 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9086 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9087 IEM_MC_FETCH_EFLAGS(EFlags);
9088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9089
9090 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9091 IEM_MC_COMMIT_EFLAGS(EFlags);
9092 IEM_MC_ADVANCE_RIP_AND_FINISH();
9093 IEM_MC_END();
9094 break;
9095
9096 case IEMMODE_32BIT:
9097 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
9098 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9099 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9100 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9102 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9103
9104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9106 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9107 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9108 IEM_MC_FETCH_EFLAGS(EFlags);
9109 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9110
9111 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9112 IEM_MC_COMMIT_EFLAGS(EFlags);
9113 IEM_MC_ADVANCE_RIP_AND_FINISH();
9114 IEM_MC_END();
9115 break;
9116
9117 case IEMMODE_64BIT:
9118 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
9119 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9120 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9121 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9123 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9124
9125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9127 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9128 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9129 IEM_MC_FETCH_EFLAGS(EFlags);
9130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9131
9132 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9133 IEM_MC_COMMIT_EFLAGS(EFlags);
9134 IEM_MC_ADVANCE_RIP_AND_FINISH();
9135 IEM_MC_END();
9136 break;
9137
9138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9139 }
9140 }
9141}
9142
9143/**
9144 * @opcode 0xd4
9145 */
9146FNIEMOP_DEF(iemOp_aam_Ib)
9147{
9148 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9149 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9151 IEMOP_HLP_NO_64BIT();
9152 if (!bImm)
9153 IEMOP_RAISE_DIVIDE_ERROR_RET();
9154 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9155}
9156
9157
9158/**
9159 * @opcode 0xd5
9160 */
9161FNIEMOP_DEF(iemOp_aad_Ib)
9162{
9163 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9164 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9166 IEMOP_HLP_NO_64BIT();
9167 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9168}
9169
9170
9171/**
9172 * @opcode 0xd6
9173 */
9174FNIEMOP_DEF(iemOp_salc)
9175{
9176 IEMOP_MNEMONIC(salc, "salc");
9177 IEMOP_HLP_NO_64BIT();
9178
9179 IEM_MC_BEGIN(0, 0, 0, 0);
9180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9181 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9182 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9183 } IEM_MC_ELSE() {
9184 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9185 } IEM_MC_ENDIF();
9186 IEM_MC_ADVANCE_RIP_AND_FINISH();
9187 IEM_MC_END();
9188}
9189
9190
9191/**
9192 * @opcode 0xd7
9193 */
9194FNIEMOP_DEF(iemOp_xlat)
9195{
9196 IEMOP_MNEMONIC(xlat, "xlat");
9197 switch (pVCpu->iem.s.enmEffAddrMode)
9198 {
9199 case IEMMODE_16BIT:
9200 IEM_MC_BEGIN(2, 0, 0, 0);
9201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9202 IEM_MC_LOCAL(uint8_t, u8Tmp);
9203 IEM_MC_LOCAL(uint16_t, u16Addr);
9204 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9205 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9206 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9207 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9208 IEM_MC_ADVANCE_RIP_AND_FINISH();
9209 IEM_MC_END();
9210 break;
9211
9212 case IEMMODE_32BIT:
9213 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9215 IEM_MC_LOCAL(uint8_t, u8Tmp);
9216 IEM_MC_LOCAL(uint32_t, u32Addr);
9217 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9218 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9219 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9220 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9221 IEM_MC_ADVANCE_RIP_AND_FINISH();
9222 IEM_MC_END();
9223 break;
9224
9225 case IEMMODE_64BIT:
9226 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9228 IEM_MC_LOCAL(uint8_t, u8Tmp);
9229 IEM_MC_LOCAL(uint64_t, u64Addr);
9230 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9231 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9232 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9233 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9234 IEM_MC_ADVANCE_RIP_AND_FINISH();
9235 IEM_MC_END();
9236 break;
9237
9238 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9239 }
9240}
9241
9242
9243/**
9244 * Common worker for FPU instructions working on ST0 and STn, and storing the
9245 * result in ST0.
9246 *
9247 * @param bRm Mod R/M byte.
9248 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9249 */
9250FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9251{
9252 IEM_MC_BEGIN(3, 1, 0, 0);
9253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9254 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9255 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9256 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9257 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9258
9259 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9260 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9261 IEM_MC_PREPARE_FPU_USAGE();
9262 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9263 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9264 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9265 } IEM_MC_ELSE() {
9266 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9267 } IEM_MC_ENDIF();
9268 IEM_MC_ADVANCE_RIP_AND_FINISH();
9269
9270 IEM_MC_END();
9271}
9272
9273
9274/**
9275 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9276 * flags.
9277 *
9278 * @param bRm Mod R/M byte.
9279 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9280 */
9281FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9282{
9283 IEM_MC_BEGIN(3, 1, 0, 0);
9284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9285 IEM_MC_LOCAL(uint16_t, u16Fsw);
9286 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9287 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9288 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9289
9290 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9291 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9292 IEM_MC_PREPARE_FPU_USAGE();
9293 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9294 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9295 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9296 } IEM_MC_ELSE() {
9297 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9298 } IEM_MC_ENDIF();
9299 IEM_MC_ADVANCE_RIP_AND_FINISH();
9300
9301 IEM_MC_END();
9302}
9303
9304
9305/**
9306 * Common worker for FPU instructions working on ST0 and STn, only affecting
9307 * flags, and popping when done.
9308 *
9309 * @param bRm Mod R/M byte.
9310 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9311 */
9312FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9313{
9314 IEM_MC_BEGIN(3, 1, 0, 0);
9315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9316 IEM_MC_LOCAL(uint16_t, u16Fsw);
9317 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9318 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9319 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9320
9321 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9322 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9323 IEM_MC_PREPARE_FPU_USAGE();
9324 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9325 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9326 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9327 } IEM_MC_ELSE() {
9328 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9329 } IEM_MC_ENDIF();
9330 IEM_MC_ADVANCE_RIP_AND_FINISH();
9331
9332 IEM_MC_END();
9333}
9334
9335
/** Opcode 0xd8 11/0.  fadd st0,stN: st0 += stN (result stored in st0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.  fmul st0,stN: st0 *= stN. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.  fcom st0,stN: compare, updates FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.  fcomp st0,stN: same compare worker as fcom, popping variant. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.  fsub st0,stN: st0 -= stN. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.  fsubr st0,stN: reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.  fdiv st0,stN: st0 /= stN. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.  fdivr st0,stN: reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9398
9399
9400/**
9401 * Common worker for FPU instructions working on ST0 and an m32r, and storing
9402 * the result in ST0.
9403 *
9404 * @param bRm Mod R/M byte.
9405 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9406 */
9407FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
9408{
9409 IEM_MC_BEGIN(3, 3, 0, 0);
9410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9411 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9412 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9413 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9414 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9415 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9416
9417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9419
9420 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9421 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9422 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9423
9424 IEM_MC_PREPARE_FPU_USAGE();
9425 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9426 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
9427 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9428 } IEM_MC_ELSE() {
9429 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9430 } IEM_MC_ENDIF();
9431 IEM_MC_ADVANCE_RIP_AND_FINISH();
9432
9433 IEM_MC_END();
9434}
9435
9436
/** Opcode 0xd8 !11/0.  fadd st0,m32r: st0 += 32-bit real from memory.
 * @sa iemOp_fadd_stN */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.  fmul st0,m32r: st0 *= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9451
9452
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r: compare ST0 with a 32-bit real from memory; only the FSW
 * is updated (no register written).  Open-coded rather than using a worker
 * since the memory-operand FSW/underflow macros take the effective address. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9484
9485
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r: same as fcom st0,m32r above but popping the FPU stack
 * afterwards (…_THEN_POP variants on both the FSW-update and underflow
 * paths). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9517
9518
9519/** Opcode 0xd8 !11/4. */
9520FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
9521{
9522 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
9523 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
9524}
9525
9526
/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32r: ST0 = [m32 real] - ST0 (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9533
9534
/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32r: ST0 = ST0 / [m32 real]; defers to the common ST0-op-m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9541
9542
/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32r: ST0 = [m32 real] / ST0 (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9549
9550
9551/**
9552 * @opcode 0xd8
9553 */
9554FNIEMOP_DEF(iemOp_EscF0)
9555{
9556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9557 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9558
9559 if (IEM_IS_MODRM_REG_MODE(bRm))
9560 {
9561 switch (IEM_GET_MODRM_REG_8(bRm))
9562 {
9563 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9564 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9565 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9566 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9567 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9568 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9569 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9570 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9572 }
9573 }
9574 else
9575 {
9576 switch (IEM_GET_MODRM_REG_8(bRm))
9577 {
9578 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9579 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9580 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9581 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9582 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9583 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9584 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9585 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9587 }
9588 }
9589}
9590
9591
/** Opcode 0xd9 /0 mem32real
 * FLD m32r: converts a 32-bit real memory operand to 80-bit and pushes it
 * onto the FPU register stack (overflow if ST7 is occupied).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in the register that is currently ST7; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9622
9623
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r: stores ST0 as a 32-bit real to memory (no pop).  If ST0 is empty
 * and the invalid-operation exception is masked (FCW.IM), a negative QNaN is
 * written instead; otherwise the store is rolled back. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Masked underflow response: store the indefinite (negative QNaN). */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9661
9662
/** Opcode 0xd9 !11/3
 * FSTP m32r: same as FST m32r (see iemOp_fst_m32r) but pops the register
 * stack after a successful store / underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Masked underflow response: store the indefinite (negative QNaN). */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9700
9701
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment (CW/SW/TW/FIP/FDP/FOP) from
 * memory; the 14 vs 28 byte layout depends on the effective operand size,
 * hence the heavy lifting is done in iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9719
9720
9721/** Opcode 0xd9 !11/5 */
9722FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9723{
9724 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9725 IEM_MC_BEGIN(1, 1, 0, 0);
9726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9728
9729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9730 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9731 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9732
9733 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9734 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9735
9736 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9737 IEM_MC_END();
9738}
9739
9740
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte: stores the FPU environment to memory (no-wait form;
 * the layout depends on the effective operand size, handled by
 * iemCImpl_fnstenv).
 * NOTE(review): the mnemonic stats token/string say "fstenv" while the
 * handler is the no-wait FNSTENV encoding - presumably intentional since the
 * wait form is FNSTENV preceded by FWAIT; confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9758
9759
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: stores the FPU control word to a 16-bit memory operand.
 * Simple enough to be done entirely inline (no CIMPL call). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9776
9777
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does no arithmetic, but still checks CR0.TS/EM, pending FPU
 * exceptions, and updates the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9793
9794
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the register stack; pushes
 * underflow handling if the source register is tagged empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9819
9820
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST0 with ST(i).  The swap is done inline when both
 * registers are valid; the underflow case goes through a CIMPL helper. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* ST(i) value goes to ST0 (via FpuRes, C1 set), old ST0 goes to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9849
9850
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST0 to ST(i) and pops.  The iDstReg==0 case is
 * special-cased since FSTP ST0,ST0 amounts to a plain pop ('ffreep st0'). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            /* Nothing to store; just pop with a clean FSW. */
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9897
9898
9899/**
9900 * Common worker for FPU instructions working on ST0 and replaces it with the
9901 * result, i.e. unary operators.
9902 *
9903 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9904 */
9905FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9906{
9907 IEM_MC_BEGIN(2, 1, 0, 0);
9908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9909 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9910 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9911 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9912
9913 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9914 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9915 IEM_MC_PREPARE_FPU_USAGE();
9916 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9917 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9918 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9919 } IEM_MC_ELSE() {
9920 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9921 } IEM_MC_ENDIF();
9922 IEM_MC_ADVANCE_RIP_AND_FINISH();
9923
9924 IEM_MC_END();
9925}
9926
9927
/** Opcode 0xd9 0xe0.
 * FCHS: flips the sign of ST0 via the common unary-ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9934
9935
/** Opcode 0xd9 0xe1.
 * FABS: clears the sign of ST0 via the common unary-ST0 worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9942
9943
/** Opcode 0xd9 0xe4.
 * FTST: compares ST0 against +0.0, setting C0/C2/C3 in FSW only (no stack
 * or register modification besides the status word). */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9967
9968
/** Opcode 0xd9 0xe5.
 * FXAM: classifies ST0 into C0/C2/C3/C1.  Unlike most ST0 ops it does not
 * underflow on an empty register - FXAM reports 'empty' as a class, hence
 * the unconditional IEM_MC_REF_FPUREG instead of a NOT_EMPTY guard. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9989
9990
9991/**
9992 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9993 *
9994 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9995 */
9996FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9997{
9998 IEM_MC_BEGIN(1, 1, 0, 0);
9999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10000 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10001 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10002
10003 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10004 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10005 IEM_MC_PREPARE_FPU_USAGE();
10006 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10007 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10008 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10009 } IEM_MC_ELSE() {
10010 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10011 } IEM_MC_ENDIF();
10012 IEM_MC_ADVANCE_RIP_AND_FINISH();
10013
10014 IEM_MC_END();
10015}
10016
10017
/** Opcode 0xd9 0xe8.
 * FLD1: pushes the constant +1.0 via the common push-constant worker. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10024
10025
/** Opcode 0xd9 0xe9.
 * FLDL2T: pushes the constant log2(10) via the common push-constant worker. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10032
10033
/** Opcode 0xd9 0xea.
 * FLDL2E: pushes the constant log2(e) via the common push-constant worker. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10040
/** Opcode 0xd9 0xeb.
 * FLDPI: pushes the constant pi via the common push-constant worker. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10047
10048
/** Opcode 0xd9 0xec.
 * FLDLG2: pushes the constant log10(2) via the common push-constant worker. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10055
/** Opcode 0xd9 0xed.
 * FLDLN2: pushes the constant ln(2) via the common push-constant worker. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10062
10063
/** Opcode 0xd9 0xee.
 * FLDZ: pushes the constant +0.0 via the common push-constant worker. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10070
10071
/** Opcode 0xd9 0xf0.
 *
 * F2XM1: ST0 = 2^ST0 - 1.  The instruction works on values +1.0 thru -1.0,
 * currently (the range on 287 & 8087 was +0.5 thru 0.0 according to docs).
 * In addition it does appear to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10085
10086
10087/**
10088 * Common worker for FPU instructions working on STn and ST0, storing the result
10089 * in STn, and popping the stack unless IE, DE or ZE was raised.
10090 *
10091 * @param bRm Mod R/M byte.
10092 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10093 */
10094FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10095{
10096 IEM_MC_BEGIN(3, 1, 0, 0);
10097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10098 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10099 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10100 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10101 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10102
10103 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10104 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10105
10106 IEM_MC_PREPARE_FPU_USAGE();
10107 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10108 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10109 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10110 } IEM_MC_ELSE() {
10111 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10112 } IEM_MC_ENDIF();
10113 IEM_MC_ADVANCE_RIP_AND_FINISH();
10114
10115 IEM_MC_END();
10116}
10117
10118
10119/** Opcode 0xd9 0xf1. */
10120FNIEMOP_DEF(iemOp_fyl2x)
10121{
10122 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
10123 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
10124}
10125
10126
10127/**
10128 * Common worker for FPU instructions working on ST0 and having two outputs, one
10129 * replacing ST0 and one pushed onto the stack.
10130 *
10131 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10132 */
10133FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10134{
10135 IEM_MC_BEGIN(2, 1, 0, 0);
10136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10137 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10138 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10139 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10140
10141 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10142 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10143 IEM_MC_PREPARE_FPU_USAGE();
10144 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10145 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10146 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10147 } IEM_MC_ELSE() {
10148 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10149 } IEM_MC_ENDIF();
10150 IEM_MC_ADVANCE_RIP_AND_FINISH();
10151
10152 IEM_MC_END();
10153}
10154
10155
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST0 with tan(ST0) and pushes 1.0 (two-result worker). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10162
10163
/** Opcode 0xd9 0xf3.
 * FPATAN: ST1 = atan(ST1/ST0), then pop (store-in-STn-and-pop worker). */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10170
10171
/** Opcode 0xd9 0xf4.
 * FXTRACT: splits ST0 into exponent (replaces ST0) and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10178
10179
/** Opcode 0xd9 0xf5.
 * FPREM1: ST0 = IEEE remainder of ST0/ST1 (result stays in ST0, no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10186
10187
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrements the FPU top-of-stack pointer (no tags/values change). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10208
10209
/** Opcode 0xd9 0xf7.
 * FINCSTP: increments the FPU top-of-stack pointer (no tags/values change). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10230
10231
/** Opcode 0xd9 0xf8.
 * FPREM: ST0 = partial (truncating) remainder of ST0/ST1 (no pop). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10238
10239
/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop (store-in-STn-and-pop worker). */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10246
10247
/** Opcode 0xd9 0xfa.
 * FSQRT: ST0 = sqrt(ST0) via the common unary-ST0 worker. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10254
10255
/** Opcode 0xd9 0xfb.
 * FSINCOS: replaces ST0 with sin(ST0) and pushes cos(ST0) (two-result worker). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10262
10263
/** Opcode 0xd9 0xfc.
 * FRNDINT: rounds ST0 to integer per FCW.RC via the common unary-ST0 worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10270
10271
/** Opcode 0xd9 0xfd.
 * FSCALE: ST0 = ST0 * 2^trunc(ST1) (result in ST0, no pop). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10278
10279
/** Opcode 0xd9 0xfe.
 * FSIN: ST0 = sin(ST0) via the common unary-ST0 worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10286
10287
/** Opcode 0xd9 0xff.
 * FCOS: ST0 = cos(ST0) via the common unary-ST0 worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10294
10295
/** Used by iemOp_EscF1.
 * Dispatch table for register-form 0xd9 with ModR/M bytes 0xe0..0xff,
 * indexed by (bRm - 0xe0); invalid encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
10332
10333
10334/**
10335 * @opcode 0xd9
10336 */
10337FNIEMOP_DEF(iemOp_EscF1)
10338{
10339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10340 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10341
10342 if (IEM_IS_MODRM_REG_MODE(bRm))
10343 {
10344 switch (IEM_GET_MODRM_REG_8(bRm))
10345 {
10346 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10347 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10348 case 2:
10349 if (bRm == 0xd0)
10350 return FNIEMOP_CALL(iemOp_fnop);
10351 IEMOP_RAISE_INVALID_OPCODE_RET();
10352 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10353 case 4:
10354 case 5:
10355 case 6:
10356 case 7:
10357 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10358 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10360 }
10361 }
10362 else
10363 {
10364 switch (IEM_GET_MODRM_REG_8(bRm))
10365 {
10366 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10367 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10368 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10369 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10370 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10371 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10372 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10373 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10375 }
10376 }
10377}
10378
10379
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i): copies ST(i) to ST0 when EFLAGS.CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid; only ST(i) needs to be referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10404
10405
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i): copies ST(i) to ST0 when EFLAGS.ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid; only ST(i) needs to be referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10430
10431
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i): copies ST(i) to ST0 when CF or ZF is set (below/equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid; only ST(i) needs to be referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10456
10457
/** Opcode 0xda 11/3.
 * FCMOVU ST0,ST(i): copies ST(i) to ST0 when EFLAGS.PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid; only ST(i) needs to be referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10482
10483
10484/**
10485 * Common worker for FPU instructions working on ST0 and ST1, only affecting
10486 * flags, and popping twice when done.
10487 *
10488 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10489 */
10490FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10491{
10492 IEM_MC_BEGIN(3, 1, 0, 0);
10493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10494 IEM_MC_LOCAL(uint16_t, u16Fsw);
10495 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10496 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10497 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10498
10499 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10500 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10501
10502 IEM_MC_PREPARE_FPU_USAGE();
10503 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
10504 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10505 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10506 } IEM_MC_ELSE() {
10507 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
10508 } IEM_MC_ENDIF();
10509 IEM_MC_ADVANCE_RIP_AND_FINISH();
10510
10511 IEM_MC_END();
10512}
10513
10514
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare ST0 with ST1, set FSW condition codes, pop both. */
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10521
10522
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory, then - provided ST0
 * is not empty - calls the assembly worker and stores the result back to ST0;
 * otherwise reports stack underflow on ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Effective address must be decoded before the done-decoding check. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10558
10559
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32i: ST0 += (int32)[mem], via the common ST0-op-m32i worker. */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10566
10567
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32i: ST0 *= (int32)[mem], via the common ST0-op-m32i worker. */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10574
10575
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32i: compare ST0 with the signed 32-bit integer at [mem];
       only the FSW condition codes are updated, no register is written. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10607
10608
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32i: same as FICOM m32i (shares iemAImpl_ficom_r80_by_i32),
       but pops ST0 afterwards on both the success and underflow paths. */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10640
10641
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32i: ST0 -= (int32)[mem], via the common ST0-op-m32i worker. */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10648
10649
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32i: ST0 = (int32)[mem] - ST0 (reversed operands). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10656
10657
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32i: ST0 /= (int32)[mem], via the common ST0-op-m32i worker. */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10664
10665
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32i: ST0 = (int32)[mem] / ST0 (reversed operands). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10672
10673
/**
 * @opcode  0xda
 *
 * FPU escape 0xda: dispatches on the ModR/M byte.  The register form (mod=3)
 * encodes FCMOVB/FCMOVE/FCMOVBE/FCMOVU and FUCOMPP (0xe9 only); the memory
 * form encodes the m32i integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) word built from the modrm and escape bytes. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only the single encoding 0xe9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10715
10716
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32i: convert the signed 32-bit integer at [mem] to 80-bit real and
       push it.  ST(7) must be free (push target); otherwise stack overflow. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10747
10748
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32i (SSE3): store ST0 to [mem] as int32 using truncation, then
       pop.  The destination is mapped writable up front; on an empty ST0 the
       IM-masked path stores the integer-indefinite value instead. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception in u16Fsw rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10786
10787
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32i: store ST0 to [mem] as int32 using the current rounding mode;
       ST0 is NOT popped.  Empty-ST0 handling mirrors fisttp (integer
       indefinite when IM is masked, rollback otherwise). */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10825
10826
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32i: like FIST m32i but pops ST0 on both paths.
       NOTE(review): declares IEM_MC_BEGIN(3, 2, ...) while the otherwise
       identical fist/fisttp variants use (3, 3, ...) with the same set of
       locals - confirm whether the second argument is still significant. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10864
10865
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80real: load the 80-bit real at [mem] and push it.  ST(7) must be
       free (push target); otherwise stack overflow is signalled. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10896
10897
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80real: store ST0 to [mem] as 80-bit real, then pop.  On an empty
       ST0 with IM masked, the negative QNaN (real indefinite) is stored. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10935
10936
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB ST0,ST(i): copy ST(i) into ST0 when EFLAGS.CF is clear. */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FIP updated even when the move is not taken. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10961
10962
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE ST0,ST(i): copy ST(i) into ST0 when EFLAGS.ZF is clear. */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FIP updated even when the move is not taken. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10987
10988
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE ST0,ST(i): copy ST(i) into ST0 when both CF and ZF are clear. */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FIP updated even when the move is not taken. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11013
11014
/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /* FCMOVNU ST0,ST(i): copy ST(i) into ST0 when EFLAGS.PF is clear ("not unordered"). */
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FIP updated even when the move is not taken. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11039
11040
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable relic; treated as a NOP (only #NM checked). */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11051
11052
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable relic; treated as a NOP (only #NM checked). */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11063
11064
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FSW exception bits without checking for pending ones. */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11077
11078
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU without checking pending exceptions first
       (hence fCheckXcpts=false); deferred to the C implementation. */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11086
11087
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* FNSETPM: 80287 protected-mode switch relic; treated as a NOP (only #NM checked). */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11098
11099
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* FRSTPM: 80287XL relic; the NOP emulation is compiled out because newer
       CPUs raise #UD for this encoding, which is what we emulate. */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11115
11116
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST0,ST(i): unordered compare setting ZF/PF/CF; deferred to the C
       implementation shared with FCOMI.  The third argument ORs the fPop flag
       (0 = no pop) into the 16-bit FPU opcode word. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11125
11126
11127/** Opcode 0xdb 11/6. */
11128FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11129{
11130 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11131 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11132 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11133 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11134}
11135
11136
/**
 * @opcode  0xdb
 *
 * FPU escape 0xdb: register form (mod=3) encodes FCMOVNB/FCMOVNE/FCMOVNBE/
 * FCMOVNU, the 0xe0-0xe5 administrative encodings and FUCOMI/FCOMI; the
 * memory form encodes m32i loads/stores and the m80real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) word built from the modrm and escape bytes. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 in register mode means bRm is 0xe0..0xe7, so every
                   value is handled below and the trailing break is not reached. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11188
11189
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Both ST(i) and ST0 must be non-empty; otherwise stack underflow is
 * signalled against ST(i).
 *
 * @param   bRm         Mod R/M byte (rm selects the STn destination).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11220
11221
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST0: ST(i) += ST0, via the common STn-op-ST0 worker. */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11228
11229
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST0: ST(i) *= ST0, via the common STn-op-ST0 worker. */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11236
11237
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST0: ST(i) = ST0 - ST(i) (reversed operands). */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11244
11245
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST0: ST(i) -= ST0, via the common STn-op-ST0 worker. */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11252
11253
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST0: ST(i) = ST0 / ST(i) (reversed operands). */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11260
11261
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST0: ST(i) /= ST0, via the common STn-op-ST0 worker. */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11268
11269
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the m64real operand, then - provided ST0 is not empty - calls the
 * assembly worker and stores the result back to ST0; otherwise reports stack
 * underflow on ST0 (with the memory operand recorded for FDP).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11304
11305
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real: ST0 += [mem], via the common ST0-op-m64r worker. */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11312
11313
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real: ST0 *= [mem], via the common ST0-op-m64r worker. */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11320
11321
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM m64real: compare ST0 with the 64-bit real at [mem]; only the FSW
       condition codes are updated, no register is written. */
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11353
11354
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP m64real: same as FCOM m64real (shares iemAImpl_fcom_r80_by_r64),
       but pops ST0 afterwards on both the success and underflow paths. */
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11386
11387
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64real: ST0 -= [mem], via the common ST0-op-m64r worker. */
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11394
11395
11396/** Opcode 0xdc !11/5. */
11397FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
11398{
11399 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
11400 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
11401}
11402
11403
11404/** Opcode 0xdc !11/6. */
11405FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
11406{
11407 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
11408 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
11409}
11410
11411
11412/** Opcode 0xdc !11/7. */
11413FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
11414{
11415 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
11416 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
11417}
11418
11419
11420/**
11421 * @opcode 0xdc
11422 */
11423FNIEMOP_DEF(iemOp_EscF4)
11424{
11425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11426 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11427 if (IEM_IS_MODRM_REG_MODE(bRm))
11428 {
11429 switch (IEM_GET_MODRM_REG_8(bRm))
11430 {
11431 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11432 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11433 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11434 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11435 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11436 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11437 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11438 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11440 }
11441 }
11442 else
11443 {
11444 switch (IEM_GET_MODRM_REG_8(bRm))
11445 {
11446 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11447 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11448 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11449 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11450 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11451 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11452 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11453 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
11454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11455 }
11456 }
11457}
11458
11459
/** Opcode 0xdd !11/0.
 * FLD m64real: push a 64-bit real memory operand onto the FPU stack,
 * converting it to 80-bit extended precision.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination register in use: stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11490
11491
/** Opcode 0xdd !11/1.
 * FISTTP m64int (SSE3): store ST(0) to memory as a 64-bit integer using
 * truncation (round toward zero) regardless of FCW.RC, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11529
11530
/** Opcode 0xdd !11/2.
 * FST m64real: store ST(0) to memory as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked store the real indefinite (-QNaN), else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11568
11569
11570
11571
/** Opcode 0xdd !11/3.
 * FSTP m64real: store ST(0) to memory as a 64-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked store the real indefinite (-QNaN), else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11609
11610
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: reload the full FPU state (environment + register
 * stack) from memory; layout depends on the effective operand size.
 * Deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11628
11629
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the full FPU state to memory and then
 * reinitialize the FPU (implicit FNINIT). Deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11647
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to a 16-bit memory operand
 * without checking for pending FPU exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11671
11672
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the given stack register as empty without changing
 * its contents or the stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11692
11693
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into the given stack register (no pop). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap ST(0) in a result with zero FSW flags and store it into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11716
11717
/** Opcode 0xdd 11/4.
 * FUCOM ST(i): unordered compare of ST(0) with ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5.
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11732
11733
11734/**
11735 * @opcode 0xdd
11736 */
11737FNIEMOP_DEF(iemOp_EscF5)
11738{
11739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11740 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11741 if (IEM_IS_MODRM_REG_MODE(bRm))
11742 {
11743 switch (IEM_GET_MODRM_REG_8(bRm))
11744 {
11745 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11746 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11747 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11748 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11749 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11750 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11751 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11752 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11754 }
11755 }
11756 else
11757 {
11758 switch (IEM_GET_MODRM_REG_8(bRm))
11759 {
11760 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11761 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11762 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11763 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11764 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11765 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11766 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11767 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11769 }
11770 }
11771}
11772
11773
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) += ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): ST(i) *= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11828
11829
11830/**
11831 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11832 * the result in ST0.
11833 *
11834 * @param bRm Mod R/M byte.
11835 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11836 */
11837FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11838{
11839 IEM_MC_BEGIN(3, 3, 0, 0);
11840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11841 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11842 IEM_MC_LOCAL(int16_t, i16Val2);
11843 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11844 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11845 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11846
11847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11849
11850 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11851 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11852 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11853
11854 IEM_MC_PREPARE_FPU_USAGE();
11855 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11856 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11857 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11858 } IEM_MC_ELSE() {
11859 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11860 } IEM_MC_ENDIF();
11861 IEM_MC_ADVANCE_RIP_AND_FINISH();
11862
11863 IEM_MC_END();
11864}
11865
11866
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) += (signed 16-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) *= (signed 16-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11881
11882
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST(0) with a signed 16-bit integer memory operand,
 * setting C0/C2/C3 in FSW (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: stack underflow, FSW updated with FPU data pointer info. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11914
11915
/** Opcode 0xde !11/3.
 * FICOMP m16int: compare ST(0) with a signed 16-bit integer memory operand,
 * setting C0/C2/C3 in FSW, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: stack underflow; the _THEN_POP variant still pops. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11947
11948
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) -= (signed 16-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5.
 * FISUBR m16int: ST(0) = m16i - ST(0) (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6.
 * FIDIV m16int: ST(0) /= (signed 16-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST(0) = m16i / ST(0) (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11979
11980
11981/**
11982 * @opcode 0xde
11983 */
11984FNIEMOP_DEF(iemOp_EscF6)
11985{
11986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11987 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11988 if (IEM_IS_MODRM_REG_MODE(bRm))
11989 {
11990 switch (IEM_GET_MODRM_REG_8(bRm))
11991 {
11992 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11993 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11994 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11995 case 3: if (bRm == 0xd9)
11996 return FNIEMOP_CALL(iemOp_fcompp);
11997 IEMOP_RAISE_INVALID_OPCODE_RET();
11998 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11999 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
12000 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
12001 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
12002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12003 }
12004 }
12005 else
12006 {
12007 switch (IEM_GET_MODRM_REG_8(bRm))
12008 {
12009 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12010 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12011 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12012 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12013 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12014 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12015 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12016 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12018 }
12019 }
12020}
12021
12022
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The "pop": advance TOP without touching register contents. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12042
12043
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word into AX without checking for
 * pending FPU exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12058
12059
12060/** Opcode 0xdf 11/5. */
12061FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12062{
12063 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12064 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12065 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12066 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12067}
12068
12069
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare of ST(0) with ST(i) setting
 * ZF/PF/CF in EFLAGS (raises \#IA on any NaN), then pop; fUCmp=false
 * selects the ordered compare, bit 31 requests the pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12078
12079
/** Opcode 0xdf !11/0.
 * FILD m16int: push a signed 16-bit integer memory operand onto the FPU
 * stack, converted to 80-bit extended precision. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12110
12111
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) to memory as a 16-bit integer using
 * truncation (round toward zero) regardless of FCW.RC, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12149
12150
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) to memory as a 16-bit integer, rounding per
 * FCW.RC (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12188
12189
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST(0) to memory as a 16-bit integer, rounding per
 * FCW.RC, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception rolls the store back. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12227
12228
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: push an 80-bit packed BCD memory operand onto the FPU
 * stack, converted to 80-bit extended precision. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12259
12260
/** Opcode 0xdf !11/5.
 * FILD m64int: push a signed 64-bit integer memory operand onto the FPU
 * stack, converted to 80-bit extended precision. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12291
12292
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    /* Stores ST0 to memory as an 80-bit packed BCD value and pops the stack.
       When ST0 is empty: if FCW.IM is set an indefinite BCD value is stored,
       otherwise the mapped write is rolled back; the underflow is raised in
       either case. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only before looking at the register so a
       faulting address is reported regardless of the FPU stack state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12330
12331
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    /* Stores ST0 to memory as a 64-bit signed integer and pops the stack.
       Empty ST0: INT64_MIN (integer indefinite) is stored if FCW.IM is set,
       otherwise the mapped write is rolled back; underflow raised either way. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination before checking ST0 so address faults surface first. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12369
12370
/**
 * @opcode 0xdf
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    /* FPU escape 0xdf: fetch the ModR/M byte and dispatch to the register
       form (mod=11b) or memory form handlers defined above. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the value for the FPU opcode (FOP) register. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                             /* Only DF E0 (fnstsw ax) is valid in this group. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12411
12412
/**
 * @opcode 0xe0
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register (CX/ECX/RCX) is selected by the effective address
       size.  It is decremented on both paths; the branch is taken when the
       decremented counter is non-zero AND ZF is clear. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12466
12467
/**
 * @opcode 0xe1
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Same as LOOPNE above, except the branch condition requires ZF to be
       SET: jump when the decremented counter (CX/ECX/RCX per effective
       address size) is non-zero AND ZF is set.  Counter decremented on both
       paths. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12521
12522
/**
 * @opcode 0xe2
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Detect a self-referencing LOOP (target == own start) and, only while
       verbose logging is active, collapse it by zeroing the counter. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Normal LOOP: branch while the decremented counter is non-zero.  Note
       that the ELSE path stores zero rather than doing a SUB, which yields
       the same architectural counter value when the pre-decrement value was
       one. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12616
12617
/**
 * @opcode 0xe3
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Branch when the counter register (CX/ECX/RCX per effective address
       size) is zero; unlike LOOP* the counter is NOT modified. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12665
12666
/** Opcode 0xe4 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to iemCImpl_in: immediate port, 1 byte access width, AL is
       the only guest register written (hence the xAX shadow-reg mask). */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12676
12677
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 or 4 bytes. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12688
12689
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OUT writes no guest registers, so the shadow-register mask is zero. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12699
12700
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 or 4 bytes. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12711
12712
/**
 * @opcode 0xe8
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* Near relative call; the immediate is sign-extended to the operand
       size and handed to the C implementation which pushes the return
       address and adjusts RIP. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still encodes a 32-bit displacement, sign extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12746
12747
/**
 * @opcode 0xe9
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    /* Near relative jump.  The 32-bit and 64-bit cases share one path since
       both encode a signed 32-bit displacement. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12777
12778
/**
 * @opcode 0xea
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();   /* Direct far jumps are invalid in 64-bit mode. */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* UINT64_MAX: conservatively mark all shadowed guest registers dirty. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
12800
12801
/**
 * @opcode 0xeb
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    /* Short jump: signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12816
12817
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Port taken from DX by the C implementation; 1 byte access width. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12827
12828
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 or 4 bytes. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12839
12840
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OUT writes no guest registers, so the shadow-register mask is zero. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12849
12850
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 or 4 bytes. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12860
12861
/**
 * @opcode 0xf0
 */
FNIEMOP_DEF(iemOp_lock)
{
    /* Record the LOCK prefix and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12873
12874
/**
 * @opcode 0xf1
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    /* Raises \#DB via the common software-interrupt C implementation. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12890
12891
/**
 * @opcode 0xf2
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12909
12910
/**
 * @opcode 0xf3
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12928
12929
/**
 * @opcode 0xf4
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Ends the current translation block; halting is handled in C code. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
}
12939
12940
/**
 * @opcode 0xf5
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    /* Complement (toggle) the carry flag; no other flags are touched. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12953
12954
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Expands the full register/memory handling for a byte-sized unary
 * read-modify-write instruction.  For a locked memory operand the
 * operand is mapped atomically and the a_fnLockedU8 worker is used;
 * the lock prefix is ignored when IEM_F_X86_DISREGARD_LOCK is set.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* unlocked (or lock disregarded) read-modify-write */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* locked: atomic mapping + locked worker */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
13015
13016
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register forms and the non-locked memory forms for all three
 * operand sizes.  NOTE: this macro deliberately ends inside an open
 * 'else {' (the locked-memory branch) and MUST be followed by
 * IEMOP_BODY_UNARY_Ev_LOCKED, which supplies that branch and closes the
 * braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
13140
/**
 * Locked-memory tail for IEMOP_BODY_UNARY_Ev.
 *
 * Supplies the locked (atomic mapping) memory forms for all three operand
 * sizes and closes the braces left open by IEMOP_BODY_UNARY_Ev; the two
 * macros must always be used as a pair.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13206
13207
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The '1' accounts for the immediate byte that follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* TEST only reads the operand, so it is mapped read-only. */
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13258
13259
/**
 * Common worker for the byte-sized multiply/divide instructions:
 * opcode 0xf6 /4 (mul), /5 (imul), /6 (div) and /7 (idiv).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit assembly worker; operates on AX and returns zero
 *                  on success, non-zero to raise a divide error (\#DE).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; any other value raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; any other value raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13310
13311
/**
 * Common worker for the word/dword/qword multiply/divide instructions:
 * opcode 0xf7 /4 (mul), /5 (imul), /6 (div) and /7 (idiv).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with per-operand-size workers; each operates on
 *                  xAX:xDX and returns zero on success, non-zero to raise a
 *                  divide error (\#DE).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc == 0 means success; any other value raises #DE. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Success: 32-bit register writes zero-extend, so clear the high halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc == 0 means success; any other value raises #DE. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Success: 32-bit register writes zero-extend, so clear the high halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13482
13483
13484/**
13485 * @opmaps grp3_f6
13486 * @opcode /2
13487 */
13488FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
13489{
13490 IEMOP_MNEMONIC(not_Eb, "not Eb");
13491 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
13492}
13493
13494
13495/**
13496 * @opmaps grp3_f6
13497 * @opcode /3
13498 */
13499FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13500{
13501 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13502 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13503}
13504
13505
13506/**
13507 * @opcode 0xf6
13508 */
13509FNIEMOP_DEF(iemOp_Grp3_Eb)
13510{
13511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13512 switch (IEM_GET_MODRM_REG_8(bRm))
13513 {
13514 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13515 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13516 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
13517 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
13518 case 4:
13519 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
13520 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13521 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
13522 case 5:
13523 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
13524 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13525 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
13526 case 6:
13527 IEMOP_MNEMONIC(div_Eb, "div Eb");
13528 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13529 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
13530 case 7:
13531 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
13532 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13533 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
13534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13535 }
13536}
13537
13538
/**
 * Opcode 0xf7 /0 - test Ev,Iz.
 *
 * The immediate is 16/32 bits wide for 16/32-bit operand size; for 64-bit
 * operand size it is a 32-bit immediate sign-extended to 64 bits.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by TEST. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of the imm16 that follows. */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* TEST only reads the destination, so map it read-only. */
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the imm32 that follows. */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the imm32 that follows. */

                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13674
13675
/** Opcode 0xf7 /2 - not Ev.  Plain body first; the LOCKED body handles the
 *  LOCK-prefixed memory forms. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13683
13684
/** Opcode 0xf7 /3 - neg Ev.  Plain body first; the LOCKED body handles the
 *  LOCK-prefixed memory forms. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13692
13693
13694/**
13695 * @opcode 0xf7
13696 */
13697FNIEMOP_DEF(iemOp_Grp3_Ev)
13698{
13699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13700 switch (IEM_GET_MODRM_REG_8(bRm))
13701 {
13702 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13703 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13704 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
13705 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
13706 case 4:
13707 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
13708 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13709 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
13710 case 5:
13711 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
13712 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13713 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
13714 case 6:
13715 IEMOP_MNEMONIC(div_Ev, "div Ev");
13716 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13717 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
13718 case 7:
13719 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
13720 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13721 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
13722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13723 }
13724}
13725
13726
13727/**
13728 * @opcode 0xf8
13729 */
13730FNIEMOP_DEF(iemOp_clc)
13731{
13732 IEMOP_MNEMONIC(clc, "clc");
13733 IEM_MC_BEGIN(0, 0, 0, 0);
13734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13735 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
13736 IEM_MC_ADVANCE_RIP_AND_FINISH();
13737 IEM_MC_END();
13738}
13739
13740
13741/**
13742 * @opcode 0xf9
13743 */
13744FNIEMOP_DEF(iemOp_stc)
13745{
13746 IEMOP_MNEMONIC(stc, "stc");
13747 IEM_MC_BEGIN(0, 0, 0, 0);
13748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13749 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
13750 IEM_MC_ADVANCE_RIP_AND_FINISH();
13751 IEM_MC_END();
13752}
13753
13754
13755/**
13756 * @opcode 0xfa
13757 */
13758FNIEMOP_DEF(iemOp_cli)
13759{
13760 IEMOP_MNEMONIC(cli, "cli");
13761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13762 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
13763}
13764
13765
/**
 * Implements 'sti' - deferred to the C implementation with RFLAGS-modifying,
 * check-IRQ-after, VM-exit and interrupt-shadow flags.
 *
 * @opcode 0xfb
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13773
13774
13775/**
13776 * @opcode 0xfc
13777 */
13778FNIEMOP_DEF(iemOp_cld)
13779{
13780 IEMOP_MNEMONIC(cld, "cld");
13781 IEM_MC_BEGIN(0, 0, 0, 0);
13782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13783 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
13784 IEM_MC_ADVANCE_RIP_AND_FINISH();
13785 IEM_MC_END();
13786}
13787
13788
13789/**
13790 * @opcode 0xfd
13791 */
13792FNIEMOP_DEF(iemOp_std)
13793{
13794 IEMOP_MNEMONIC(std, "std");
13795 IEM_MC_BEGIN(0, 0, 0, 0);
13796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13797 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
13798 IEM_MC_ADVANCE_RIP_AND_FINISH();
13799 IEM_MC_END();
13800}
13801
13802
13803/**
13804 * @opmaps grp4
13805 * @opcode /0
13806 */
13807FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
13808{
13809 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
13810 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
13811}
13812
13813
13814/**
13815 * @opmaps grp4
13816 * @opcode /1
13817 */
13818FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
13819{
13820 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
13821 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
13822}
13823
13824
13825/**
13826 * @opcode 0xfe
13827 */
13828FNIEMOP_DEF(iemOp_Grp4)
13829{
13830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13831 switch (IEM_GET_MODRM_REG_8(bRm))
13832 {
13833 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
13834 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
13835 default:
13836 /** @todo is the eff-addr decoded? */
13837 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
13838 IEMOP_RAISE_INVALID_OPCODE_RET();
13839 }
13840}
13841
/** Opcode 0xff /0 - inc Ev.  Plain body first; the LOCKED body handles the
 *  LOCK-prefixed memory forms. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13849
13850
/** Opcode 0xff /1 - dec Ev.  Plain body first; the LOCKED body handles the
 *  LOCK-prefixed memory forms. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13858
13859
13860/**
13861 * Opcode 0xff /2.
13862 * @param bRm The RM byte.
13863 */
13864FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
13865{
13866 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
13867 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13868
13869 if (IEM_IS_MODRM_REG_MODE(bRm))
13870 {
13871 /* The new RIP is taken from a register. */
13872 switch (pVCpu->iem.s.enmEffOpSize)
13873 {
13874 case IEMMODE_16BIT:
13875 IEM_MC_BEGIN(1, 0, 0, 0);
13876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13877 IEM_MC_ARG(uint16_t, u16Target, 0);
13878 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13879 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
13880 IEM_MC_END();
13881 break;
13882
13883 case IEMMODE_32BIT:
13884 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
13885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13886 IEM_MC_ARG(uint32_t, u32Target, 0);
13887 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13888 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
13889 IEM_MC_END();
13890 break;
13891
13892 case IEMMODE_64BIT:
13893 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
13894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13895 IEM_MC_ARG(uint64_t, u64Target, 0);
13896 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13897 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
13898 IEM_MC_END();
13899 break;
13900
13901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13902 }
13903 }
13904 else
13905 {
13906 /* The new RIP is taken from a register. */
13907 switch (pVCpu->iem.s.enmEffOpSize)
13908 {
13909 case IEMMODE_16BIT:
13910 IEM_MC_BEGIN(1, 1, 0, 0);
13911 IEM_MC_ARG(uint16_t, u16Target, 0);
13912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13915 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13916 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
13917 IEM_MC_END();
13918 break;
13919
13920 case IEMMODE_32BIT:
13921 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
13922 IEM_MC_ARG(uint32_t, u32Target, 0);
13923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13926 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13927 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
13928 IEM_MC_END();
13929 break;
13930
13931 case IEMMODE_64BIT:
13932 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
13933 IEM_MC_ARG(uint64_t, u64Target, 0);
13934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13937 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13938 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
13939 IEM_MC_END();
13940 break;
13941
13942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13943 }
13944 }
13945}
13946
/**
 * Body macro for the group 5 far branches through a memory far pointer,
 * i.e. 0xff /3 (callf Ep) and 0xff /5 (jmpf Ep).
 *
 * Register operands raise \#UD.  In 64-bit mode the default operand size is
 * 32-bit; a REX.W 64-bit operand size is only honored on Intel CPUs.
 *
 * @param   a_bRm           The ModR/M byte (must be a memory mode).
 * @param   a_fnCImpl       The C implementation doing the branch
 *                          (iemCImpl_callf or iemCImpl_FarJmp).
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags to pass along.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14015
14016
14017/**
14018 * Opcode 0xff /3.
14019 * @param bRm The RM byte.
14020 */
14021FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
14022{
14023 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
14024 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
14025}
14026
14027
14028/**
14029 * Opcode 0xff /4.
14030 * @param bRm The RM byte.
14031 */
14032FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14033{
14034 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
14035 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14036
14037 if (IEM_IS_MODRM_REG_MODE(bRm))
14038 {
14039 /* The new RIP is taken from a register. */
14040 switch (pVCpu->iem.s.enmEffOpSize)
14041 {
14042 case IEMMODE_16BIT:
14043 IEM_MC_BEGIN(0, 1, 0, 0);
14044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14045 IEM_MC_LOCAL(uint16_t, u16Target);
14046 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14047 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14048 IEM_MC_END();
14049 break;
14050
14051 case IEMMODE_32BIT:
14052 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
14053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14054 IEM_MC_LOCAL(uint32_t, u32Target);
14055 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14056 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14057 IEM_MC_END();
14058 break;
14059
14060 case IEMMODE_64BIT:
14061 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
14062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14063 IEM_MC_LOCAL(uint64_t, u64Target);
14064 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14065 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14066 IEM_MC_END();
14067 break;
14068
14069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14070 }
14071 }
14072 else
14073 {
14074 /* The new RIP is taken from a memory location. */
14075 switch (pVCpu->iem.s.enmEffOpSize)
14076 {
14077 case IEMMODE_16BIT:
14078 IEM_MC_BEGIN(0, 2, 0, 0);
14079 IEM_MC_LOCAL(uint16_t, u16Target);
14080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14083 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14084 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14085 IEM_MC_END();
14086 break;
14087
14088 case IEMMODE_32BIT:
14089 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
14090 IEM_MC_LOCAL(uint32_t, u32Target);
14091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14094 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14095 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14096 IEM_MC_END();
14097 break;
14098
14099 case IEMMODE_64BIT:
14100 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14101 IEM_MC_LOCAL(uint64_t, u64Target);
14102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14105 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14106 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14107 IEM_MC_END();
14108 break;
14109
14110 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14111 }
14112 }
14113}
14114
14115
14116/**
14117 * Opcode 0xff /5.
14118 * @param bRm The RM byte.
14119 */
14120FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14121{
14122 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
14123 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
14124}
14125
14126
14127/**
14128 * Opcode 0xff /6.
14129 * @param bRm The RM byte.
14130 */
14131FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
14132{
14133 IEMOP_MNEMONIC(push_Ev, "push Ev");
14134
14135 /* Registers are handled by a common worker. */
14136 if (IEM_IS_MODRM_REG_MODE(bRm))
14137 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
14138
14139 /* Memory we do here. */
14140 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14141 switch (pVCpu->iem.s.enmEffOpSize)
14142 {
14143 case IEMMODE_16BIT:
14144 IEM_MC_BEGIN(0, 2, 0, 0);
14145 IEM_MC_LOCAL(uint16_t, u16Src);
14146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14149 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14150 IEM_MC_PUSH_U16(u16Src);
14151 IEM_MC_ADVANCE_RIP_AND_FINISH();
14152 IEM_MC_END();
14153 break;
14154
14155 case IEMMODE_32BIT:
14156 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
14157 IEM_MC_LOCAL(uint32_t, u32Src);
14158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14161 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14162 IEM_MC_PUSH_U32(u32Src);
14163 IEM_MC_ADVANCE_RIP_AND_FINISH();
14164 IEM_MC_END();
14165 break;
14166
14167 case IEMMODE_64BIT:
14168 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14169 IEM_MC_LOCAL(uint64_t, u64Src);
14170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14173 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14174 IEM_MC_PUSH_U64(u64Src);
14175 IEM_MC_ADVANCE_RIP_AND_FINISH();
14176 IEM_MC_END();
14177 break;
14178
14179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14180 }
14181}
14182
14183
14184/**
14185 * @opcode 0xff
14186 */
14187FNIEMOP_DEF(iemOp_Grp5)
14188{
14189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14190 switch (IEM_GET_MODRM_REG_8(bRm))
14191 {
14192 case 0:
14193 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14194 case 1:
14195 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14196 case 2:
14197 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14198 case 3:
14199 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14200 case 4:
14201 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14202 case 5:
14203 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14204 case 6:
14205 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14206 case 7:
14207 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14208 IEMOP_RAISE_INVALID_OPCODE_RET();
14209 }
14210 AssertFailedReturn(VERR_IEM_IPE_3);
14211}
14212
14213
14214
/**
 * The one-byte opcode dispatch table.
 *
 * One handler per opcode byte (0x00 thru 0xff); each row comment gives the
 * opcode of the row's first entry.  Forward declared at the top of this file
 * so the decoder loop can reference it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
14282
14283
14284/** @} */
14285