VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 101387

最後變更 在這個檔案從101387是 101387,由 vboxsync 提交於 18 月 前

VMM/IEM: Added a new class of threaded function variants, the 16f/32f/64f variants that will clear RF (and vbox internal friends) and check for TF (and vbox internal friends). The variants w/o the 'f' after the bitcount will skip this test+branch. The motivation of this was to deal with this issue that the threaded recompiler level rather than try optimize away the test+branch++ code when generating native code, make the IEM_MC_ADVANCE_RIP_AND_FINISH_THREADED_PC32 a very simple place to start emitting native code (compared to IEM_MC_ADVANCE_RIP_AND_FINISH_THREADED_PC32_WITH_FLAGS). bugref:10371

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Author Date Id Revision
檔案大小: 489.9 KB
 
1/* $Id: IEMAllInstOneByte.cpp.h 101387 2023-10-07 23:34:54Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
 10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
/** Dispatch table for the one-byte opcode decoders; defined elsewhere in this
 *  file and forward declared here (hence not static). */
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
 *
 * Decodes the ModR/M byte, emits the register-destination form and the
 * unlocked memory form (map r/m8 read-write, apply a_fnNormalU8, commit).
 * NOTE: this macro deliberately ends inside an open 'else {' scope for the
 * LOCK-prefixed path; the _NO_LOCK/_LOCKED continuation macro closes it.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
 66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
 67 \
 68 /* \
 69 * If rm is denoting a register, no more instruction bytes. \
 70 */ \
 71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
 72 { \
 73 IEM_MC_BEGIN(3, 0, 0, 0); \
 74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
 75 IEM_MC_ARG(uint8_t, u8Src, 1); \
 76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
 77 \
 78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
 79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
 80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
 81 IEM_MC_REF_EFLAGS(pEFlags); \
 82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
 83 \
 84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
 85 IEM_MC_END(); \
 86 } \
 87 else \
 88 { \
 89 /* \
 90 * We're accessing memory. \
 91 * Note! We're putting the eflags on the stack here so we can commit them \
 92 * after the memory. \
 93 */ \
 94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
 95 { \
 96 IEM_MC_BEGIN(3, 3, 0, 0); \
 97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
 98 IEM_MC_ARG(uint8_t, u8Src, 1); \
 99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
 100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
 101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
 102 \
 103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
 104 IEMOP_HLP_DONE_DECODING(); \
 105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
 106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
 107 IEM_MC_FETCH_EFLAGS(EFlags); \
 108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
 109 \
 110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
 111 IEM_MC_COMMIT_EFLAGS(EFlags); \
 112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
 113 IEM_MC_END(); \
 114 } \
 115 else \
 116 { \
 117 (void)0
118
119/**
120 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
121 * operands.
 *
 * Same structure as IEMOP_BODY_BINARY_rm_r8_RW but maps the r/m8 operand
 * read-only (only EFLAGS is written back).  Ends inside an open 'else {'
 * scope for the LOCK-prefixed path which the continuation macro must close.
 * NOTE(review): for read-only ops a LOCK prefix is invalid, so this
 * presumably pairs with the _NO_LOCK tail in practice - verify at call sites.
122 *
123 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
124 */
125#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
127 \
128 /* \
129 * If rm is denoting a register, no more instruction bytes. \
130 */ \
131 if (IEM_IS_MODRM_REG_MODE(bRm)) \
132 { \
133 IEM_MC_BEGIN(3, 0, 0, 0); \
134 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
135 IEM_MC_ARG(uint8_t, u8Src, 1); \
136 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
137 \
138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
139 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
141 IEM_MC_REF_EFLAGS(pEFlags); \
142 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
143 \
144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
145 IEM_MC_END(); \
146 } \
147 else \
148 { \
149 /* \
150 * We're accessing memory. \
151 * Note! We're putting the eflags on the stack here so we can commit them \
152 * after the memory. \
153 */ \
154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
155 { \
156 IEM_MC_BEGIN(3, 3, 0, 0); \
157 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
161 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
162 \
163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
164 IEMOP_HLP_DONE_DECODING(); \
165 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
167 IEM_MC_FETCH_EFLAGS(EFlags); \
168 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
169 \
170 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
171 IEM_MC_COMMIT_EFLAGS(EFlags); \
172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
173 IEM_MC_END(); \
174 } \
175 else \
176 { \
177 (void)0
178
/**
 * Tail for the IEMOP_BODY_BINARY_rm_r8_RW/RO macros: raises \#UD when a LOCK
 * prefix was present, and closes the two brace scopes those macros left open.
 */
179#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
182 } \
183 } \
184 (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW handling the LOCK-prefixed memory form:
 * maps the r/m8 operand read-write and calls the locked worker a_fnLockedU8.
 * Closes the two brace scopes the body macro left open.
 */
186#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
187 IEM_MC_BEGIN(3, 3, 0, 0); \
188 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
189 IEM_MC_ARG(uint8_t, u8Src, 1); \
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
192 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
193 \
194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
195 IEMOP_HLP_DONE_DECODING(); \
196 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
198 IEM_MC_FETCH_EFLAGS(EFlags); \
199 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
200 \
201 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
202 IEM_MC_COMMIT_EFLAGS(EFlags); \
203 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
204 IEM_MC_END(); \
205 } \
206 } \
207 (void)0
208
209/**
210 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
211 * destination.
 *
 * Since the destination is always a register, a LOCK prefix is rejected in
 * both forms and this macro is self-contained (no continuation macro needed).
212 */
213#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
215 \
216 /* \
217 * If rm is denoting a register, no more instruction bytes. \
218 */ \
219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
220 { \
221 IEM_MC_BEGIN(3, 0, 0, 0); \
222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
223 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
224 IEM_MC_ARG(uint8_t, u8Src, 1); \
225 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
226 \
227 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
228 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
229 IEM_MC_REF_EFLAGS(pEFlags); \
230 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
231 \
232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
233 IEM_MC_END(); \
234 } \
235 else \
236 { \
237 /* \
238 * We're accessing memory. \
239 */ \
240 IEM_MC_BEGIN(3, 1, 0, 0); \
241 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
242 IEM_MC_ARG(uint8_t, u8Src, 1); \
243 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
245 \
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
248 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
249 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
250 IEM_MC_REF_EFLAGS(pEFlags); \
251 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
252 \
253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
254 IEM_MC_END(); \
255 } \
256 (void)0
257
258
259/**
260 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
261 * memory/register as the destination.
 *
 * Switches on the effective operand size (16/32/64) for both the register
 * and the unlocked memory destination forms; the 32-bit register form also
 * clears the high dword of the destination GPR.  NOTE: ends inside an open
 * 'else {' scope for the LOCK-prefixed path, which
 * IEMOP_BODY_BINARY_rm_rv_LOCKED closes.
262 */
263#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
265 \
266 /* \
267 * If rm is denoting a register, no more instruction bytes. \
268 */ \
269 if (IEM_IS_MODRM_REG_MODE(bRm)) \
270 { \
271 switch (pVCpu->iem.s.enmEffOpSize) \
272 { \
273 case IEMMODE_16BIT: \
274 IEM_MC_BEGIN(3, 0, 0, 0); \
275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
279 \
280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
284 \
285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
286 IEM_MC_END(); \
287 break; \
288 \
289 case IEMMODE_32BIT: \
290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
292 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
293 IEM_MC_ARG(uint32_t, u32Src, 1); \
294 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
295 \
296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
298 IEM_MC_REF_EFLAGS(pEFlags); \
299 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
300 \
/* 32-bit writes to a GPR zero the upper half in 64-bit mode. */ \
301 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
303 IEM_MC_END(); \
304 break; \
305 \
306 case IEMMODE_64BIT: \
307 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
309 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
310 IEM_MC_ARG(uint64_t, u64Src, 1); \
311 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
312 \
313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 IEM_MC_REF_EFLAGS(pEFlags); \
316 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
317 \
318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
319 IEM_MC_END(); \
320 break; \
321 \
322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
323 } \
324 } \
325 else \
326 { \
327 /* \
328 * We're accessing memory. \
329 * Note! We're putting the eflags on the stack here so we can commit them \
330 * after the memory. \
331 */ \
332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
333 { \
334 switch (pVCpu->iem.s.enmEffOpSize) \
335 { \
336 case IEMMODE_16BIT: \
337 IEM_MC_BEGIN(3, 3, 0, 0); \
338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
339 IEM_MC_ARG(uint16_t, u16Src, 1); \
340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
342 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
343 \
344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
345 IEMOP_HLP_DONE_DECODING(); \
346 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
347 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
348 IEM_MC_FETCH_EFLAGS(EFlags); \
349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
350 \
351 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
352 IEM_MC_COMMIT_EFLAGS(EFlags); \
353 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
354 IEM_MC_END(); \
355 break; \
356 \
357 case IEMMODE_32BIT: \
358 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
360 IEM_MC_ARG(uint32_t, u32Src, 1); \
361 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
363 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
364 \
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
366 IEMOP_HLP_DONE_DECODING(); \
367 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
368 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
369 IEM_MC_FETCH_EFLAGS(EFlags); \
370 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
371 \
372 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
373 IEM_MC_COMMIT_EFLAGS(EFlags); \
374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
375 IEM_MC_END(); \
376 break; \
377 \
378 case IEMMODE_64BIT: \
379 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
380 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
381 IEM_MC_ARG(uint64_t, u64Src, 1); \
382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
385 \
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
387 IEMOP_HLP_DONE_DECODING(); \
388 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
390 IEM_MC_FETCH_EFLAGS(EFlags); \
391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
392 \
393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
394 IEM_MC_COMMIT_EFLAGS(EFlags); \
395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
396 IEM_MC_END(); \
397 break; \
398 \
399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
400 } \
401 } \
402 else \
403 { \
404 (void)0
405/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW handling the LOCK-prefixed memory form:
 * maps the r/m operand read-write per effective operand size and calls the
 * matching locked worker.  Closes the scopes the body macro left open.
 */
406#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
407 switch (pVCpu->iem.s.enmEffOpSize) \
408 { \
409 case IEMMODE_16BIT: \
410 IEM_MC_BEGIN(3, 3, 0, 0); \
411 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
412 IEM_MC_ARG(uint16_t, u16Src, 1); \
413 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
416 \
417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
418 IEMOP_HLP_DONE_DECODING(); \
419 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
421 IEM_MC_FETCH_EFLAGS(EFlags); \
422 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
423 \
424 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
425 IEM_MC_COMMIT_EFLAGS(EFlags); \
426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
427 IEM_MC_END(); \
428 break; \
429 \
430 case IEMMODE_32BIT: \
431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
432 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
433 IEM_MC_ARG(uint32_t, u32Src, 1); \
434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
437 \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
439 IEMOP_HLP_DONE_DECODING(); \
440 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
441 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
442 IEM_MC_FETCH_EFLAGS(EFlags); \
443 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
444 \
445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
446 IEM_MC_COMMIT_EFLAGS(EFlags); \
447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
448 IEM_MC_END(); \
449 break; \
450 \
451 case IEMMODE_64BIT: \
452 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
454 IEM_MC_ARG(uint64_t, u64Src, 1); \
455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
457 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
458 \
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
460 IEMOP_HLP_DONE_DECODING(); \
461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
462 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
463 IEM_MC_FETCH_EFLAGS(EFlags); \
464 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
465 \
466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
467 IEM_MC_COMMIT_EFLAGS(EFlags); \
468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
469 IEM_MC_END(); \
470 break; \
471 \
472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
473 } \
474 } \
475 } \
476 (void)0
477
478/**
479 * Body for read-only word/dword/qword instructions like TEST and CMP with
480 * memory/register as the destination.
 *
 * Self-contained (unlike the _RW variant): the LOCK-prefixed memory form is
 * rejected with \#UD directly in the final else branch, since read-only
 * operations never permit a LOCK prefix.  Only EFLAGS is written back.
481 */
482#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
484 \
485 /* \
486 * If rm is denoting a register, no more instruction bytes. \
487 */ \
488 if (IEM_IS_MODRM_REG_MODE(bRm)) \
489 { \
490 switch (pVCpu->iem.s.enmEffOpSize) \
491 { \
492 case IEMMODE_16BIT: \
493 IEM_MC_BEGIN(3, 0, 0, 0); \
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
496 IEM_MC_ARG(uint16_t, u16Src, 1); \
497 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
498 \
499 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
500 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
501 IEM_MC_REF_EFLAGS(pEFlags); \
502 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
503 \
504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
505 IEM_MC_END(); \
506 break; \
507 \
508 case IEMMODE_32BIT: \
509 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
511 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
512 IEM_MC_ARG(uint32_t, u32Src, 1); \
513 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
514 \
515 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
517 IEM_MC_REF_EFLAGS(pEFlags); \
518 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
519 \
520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
521 IEM_MC_END(); \
522 break; \
523 \
524 case IEMMODE_64BIT: \
525 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
527 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
528 IEM_MC_ARG(uint64_t, u64Src, 1); \
529 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
530 \
531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
533 IEM_MC_REF_EFLAGS(pEFlags); \
534 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
535 \
536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
537 IEM_MC_END(); \
538 break; \
539 \
540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
541 } \
542 } \
543 else \
544 { \
545 /* \
546 * We're accessing memory. \
547 * Note! We're putting the eflags on the stack here so we can commit them \
548 * after the memory. \
549 */ \
550 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
551 { \
552 switch (pVCpu->iem.s.enmEffOpSize) \
553 { \
554 case IEMMODE_16BIT: \
555 IEM_MC_BEGIN(3, 3, 0, 0); \
556 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
557 IEM_MC_ARG(uint16_t, u16Src, 1); \
558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
561 \
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
563 IEMOP_HLP_DONE_DECODING(); \
564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
566 IEM_MC_FETCH_EFLAGS(EFlags); \
567 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
568 \
569 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
570 IEM_MC_COMMIT_EFLAGS(EFlags); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
577 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
581 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
582 \
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
584 IEMOP_HLP_DONE_DECODING(); \
585 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
586 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
587 IEM_MC_FETCH_EFLAGS(EFlags); \
588 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
589 \
590 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
591 IEM_MC_COMMIT_EFLAGS(EFlags); \
592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
593 IEM_MC_END(); \
594 break; \
595 \
596 case IEMMODE_64BIT: \
597 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
598 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
599 IEM_MC_ARG(uint64_t, u64Src, 1); \
600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
603 \
604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
605 IEMOP_HLP_DONE_DECODING(); \
606 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
607 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
608 IEM_MC_FETCH_EFLAGS(EFlags); \
609 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
610 \
611 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
612 IEM_MC_COMMIT_EFLAGS(EFlags); \
613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
614 IEM_MC_END(); \
615 break; \
616 \
617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
618 } \
619 } \
620 else \
621 { \
/* LOCK is invalid for read-only operations -> \#UD. */ \
622 IEMOP_HLP_DONE_DECODING(); \
623 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
624 } \
625 } \
626 (void)0
627
628
629/**
630 * Body for instructions like ADD, AND, OR, ++ with working on AL with
631 * a byte immediate.
 *
 * Fetches the Ib immediate, applies a_fnNormalU8 to AL and updates EFLAGS.
632 */
633#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
634 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
635 \
636 IEM_MC_BEGIN(3, 0, 0, 0); \
637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
639 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
640 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
641 \
642 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
643 IEM_MC_REF_EFLAGS(pEFlags); \
644 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
645 \
646 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
647 IEM_MC_END()
648
649/**
650 * Body for instructions like ADD, AND, OR, ++ with working on
651 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param a_fModifiesDstReg Non-zero when the worker writes the destination
 *        register; gates the high-dword clearing in the 32-bit case (ops
 *        like CMP/TEST that only read the register pass 0).
 * NOTE(review): the 16/32-bit cases end without a break; this appears to
 * rely on IEM_MC_ADVANCE_RIP_AND_FINISH()/IEM_MC_END() not falling through
 * to the next case - confirm against the IEM_MC_* macro expansions.
652 */
653#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
654 switch (pVCpu->iem.s.enmEffOpSize) \
655 { \
656 case IEMMODE_16BIT: \
657 { \
658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
659 \
660 IEM_MC_BEGIN(3, 0, 0, 0); \
661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
662 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
663 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
664 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
665 \
666 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
667 IEM_MC_REF_EFLAGS(pEFlags); \
668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
669 \
670 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
671 IEM_MC_END(); \
672 } \
673 \
674 case IEMMODE_32BIT: \
675 { \
676 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
677 \
678 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
680 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
681 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
682 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
683 \
684 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
685 IEM_MC_REF_EFLAGS(pEFlags); \
686 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
687 \
688 if (a_fModifiesDstReg) \
689 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
691 IEM_MC_END(); \
692 } \
693 \
694 case IEMMODE_64BIT: \
695 { \
/* Iz is at most 32 bits wide; sign-extend it to 64 bits. */ \
696 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
697 \
698 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
700 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
701 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
702 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
703 \
704 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
705 IEM_MC_REF_EFLAGS(pEFlags); \
706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
707 \
708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
709 IEM_MC_END(); \
710 } \
711 \
712 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
713 } \
714 (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
 * ADD r/m8, r8 - add byte register into byte register/memory (LOCK allowed
 * on the memory form).
 *
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
 * ADD r/m16/32/64, r16/32/64 (LOCK allowed on the memory form).
 *
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
 * ADD r8, r/m8 - register destination, so no LOCK form.
 *
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
 * ADD r16/32/64, r/m16/32/64 - register destination, so no LOCK form.
 *
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
784}
785
786
787/**
 * ADD AL, imm8.
 *
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
 * ADD AX/EAX/RAX, imm16/32 (sign-extended for the 64-bit form).
 *
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
 * PUSH ES - invalid in 64-bit mode.
 *
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
 * POP ES - invalid in 64-bit mode; deferred to a C worker since loading a
 * segment register can change the execution mode.
 *
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
838}
839
840
841/**
 * OR r/m8, r8 (LOCK allowed on the memory form).
 *
842 * @opcode 0x08
843 * @opgroup og_gen_arith_bin
844 * @opflmodify cf,pf,af,zf,sf,of
845 * @opflundef af
846 * @opflclear of,cf
847 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
848 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
849 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
850 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
851 */
852FNIEMOP_DEF(iemOp_or_Eb_Gb)
853{
854 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
855 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
856 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
857 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
858}
859
860
861/**
 * OR r/m16/32/64, r16/32/64 (LOCK allowed on the memory form).
 * (Comment opener fixed from '/' '*' to doxygen '/' '**' so the @opcode and
 * @optest tags are picked up like in every sibling opcode block.)
 *
862 * @opcode 0x09
863 * @opgroup og_gen_arith_bin
864 * @opflmodify cf,pf,af,zf,sf,of
865 * @opflundef af
866 * @opflclear of,cf
867 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
868 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
869 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
870 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
871 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
872 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
873 */
874FNIEMOP_DEF(iemOp_or_Ev_Gv)
875{
876 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
877 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
878 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
879 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
880}
881
882
883/**
 * OR r8, r/m8 - register destination.
 *
884 * @opcode 0x0a
885 * @opgroup og_gen_arith_bin
886 * @opflmodify cf,pf,af,zf,sf,of
887 * @opflundef af
888 * @opflclear of,cf
889 * @opcopytests iemOp_or_Eb_Gb
890 */
891FNIEMOP_DEF(iemOp_or_Gb_Eb)
892{
893 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
894 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
895 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
896}
897
898
899/**
 * OR r16/32/64, r/m16/32/64 - register destination.
 *
900 * @opcode 0x0b
901 * @opgroup og_gen_arith_bin
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef af
904 * @opflclear of,cf
905 * @opcopytests iemOp_or_Ev_Gv
906 */
907FNIEMOP_DEF(iemOp_or_Gv_Ev)
908{
909 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
910 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
911 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
912}
913
914
915/**
 * OR AL, imm8.
 *
916 * @opcode 0x0c
917 * @opgroup og_gen_arith_bin
918 * @opflmodify cf,pf,af,zf,sf,of
919 * @opflundef af
920 * @opflclear of,cf
921 * @opcopytests iemOp_or_Eb_Gb
922 */
923FNIEMOP_DEF(iemOp_or_Al_Ib)
924{
925 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
927 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
928}
929
930
931/**
 * OR AX/EAX/RAX, imm16/32 (sign-extended for the 64-bit form).
 *
932 * @opcode 0x0d
933 * @opgroup og_gen_arith_bin
934 * @opflmodify cf,pf,af,zf,sf,of
935 * @opflundef af
936 * @opflclear of,cf
937 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
938 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
939 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
940 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
941 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
942 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
943 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
944 */
945FNIEMOP_DEF(iemOp_or_eAX_Iz)
946{
947 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
948 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
949 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
950}
951
952
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* PUSH CS is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
/**
 * @opcode 0x0f
 * @opmnemonic EscTwo0f
 * @openc two0f
 * @opdisenum OP_2B_ESC
 * @ophints harmless
 * @opgroup og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte map has 4 entries per opcode byte, selected by idxPrefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Plain and LOCK-prefixed bodies for the byte worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Plain and LOCK-prefixed bodies for the 16/32/64-bit workers. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8); /* register destination, so no LOCKED body */
}
1059
1060
/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1073
1074
/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
/**
 * @opcode 0x16
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* PUSH SS is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 * @note (review) The previous tags here (og_gen_arith_bin plus
 *       opfltest/opflmodify lines) looked copy-pasted from the ADC handlers
 *       above; nothing in this handler touches the arithmetic EFLAGS.
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* POP SS is invalid in 64-bit mode */
    /* Deferred to C implementation; may change mode and shadows interrupts. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1128
1129
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Plain and LOCK-prefixed bodies for the byte worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1142
1143
/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Plain and LOCK-prefixed bodies for the 16/32/64-bit workers. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1156
1157
/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8); /* register destination, so no LOCKED body */
}
1169
1170
/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1182
1183
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1195
1196
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1208
1209
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* PUSH DS is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1220
1221
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* POP DS is invalid in 64-bit mode */
    /* Deferred to C implementation; loading DS may change addressing mode state. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1233
1234
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    /* Plain and LOCK-prefixed bodies for the byte worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1249
1250
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    /* Plain and LOCK-prefixed bodies for the 16/32/64-bit workers. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1265
1266
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8); /* register destination, so no LOCKED body */
}
1280
1281
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1295
1296
1297/**
1298 * @opcode 0x24
1299 * @opgroup og_gen_arith_bin
1300 * @opflmodify cf,pf,af,zf,sf,of
1301 * @opflundef af
1302 * @opflclear of,cf
1303 */
1304FNIEMOP_DEF(iemOp_and_Al_Ib)
1305{
1306 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1308 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1309}
1310
1311
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1325
1326
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Record the ES segment-override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1345
1346
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* DAA is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1361
1362
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Plain and LOCK-prefixed bodies for the byte worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1374
1375
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Plain and LOCK-prefixed bodies for the 16/32/64-bit workers. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1387
1388
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8); /* register destination, so no LOCKED body */
}
1399
1400
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1411
1412
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1423
1424
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1435
1436
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Record the CS segment-override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1455
1456
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* DAS is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1471
1472
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    /* Plain and LOCK-prefixed bodies for the byte worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1487
1488
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    /* Plain and LOCK-prefixed bodies for the 16/32/64-bit workers. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1503
1504
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8); /* register destination, so no LOCKED body */
}
1518
1519
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1533
1534
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1548
1549
1550/**
1551 * @opcode 0x35
1552 * @opgroup og_gen_arith_bin
1553 * @opflmodify cf,pf,af,zf,sf,of
1554 * @opflundef af
1555 * @opflclear of,cf
1556 */
1557FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1558{
1559 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1560 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1561 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1562}
1563
1564
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Record the SS segment-override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1583
1584
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* AAA is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* only OF is verified undefined here; see @opflundef for the full architectural set */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1632
1633
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads its operands: read-only body plus explicit no-LOCK handling. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1643
1644
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64); /* read-only: CMP writes no destination */
}
1653
1654
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1663
1664
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0); /* 0: destination register is not written */
}
1673
1674
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1683
1684
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0); /* 0: rAX is not written */
}
1693
1694
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Record the DS segment-override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1707
1708
1709/**
1710 * @opcode 0x3f
1711 * @opfltest af,cf
1712 * @opflmodify cf,pf,af,zf,sf,of
1713 * @opflundef pf,zf,sf,of
1714 * @opgroup og_gen_arith_dec
1715 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1716 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1717 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1718 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1719 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1720 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1721 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1722 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1723 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1724 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1725 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1726 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1731 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1732 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1733 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1734 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1735 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1736 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1744 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1745 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1746 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 */
1751FNIEMOP_DEF(iemOp_aas)
1752{
1753 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1754 IEMOP_HLP_NO_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1757
1758 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1759}
1760
1761
/**
 * Common 'inc/dec register' helper body.
 *
 * Not for 64-bit code, only for what became the rex prefixes
 * (both IEM_MC_BEGIN invocations pass IEM_MC_F_NOT_64BIT).
 *
 * @param   a_fnNormalU16   16-bit operand-size assembly worker (dst, eflags).
 * @param   a_fnNormalU32   32-bit operand-size assembly worker; the upper half
 *                          of the 64-bit register is cleared afterwards via
 *                          IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF.
 * @param   a_iReg          The general purpose register (X86_GREG_xXX).
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1797
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the real opcode */
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1818
1819
/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* bit 3 extension for the r/m / base / opcode-reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1841
1842
/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.X set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* bit 3 extension for the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1864
1865
1866
/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B and REX.X set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1889
1890
/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* bit 3 extension for the modrm reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1912
1913
/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R and REX.B set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1936
1937
/**
 * @opcode 0x46
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R and REX.X set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1960
1961
1962/**
1963 * @opcode 0x47
1964 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * Opcode 0x47: 'inc eDI' in 16/32-bit mode, but the REX.RXB prefix in
     * 64-bit mode (R, X and B bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends ModR/M.reg */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends ModR/M.rm / opcode reg */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1985
1986
1987/**
1988 * @opcode 0x48
1989 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * Opcode 0x48: 'dec eAX' in 16/32-bit mode, but the REX.W prefix in
     * 64-bit mode.  REX.W affects the operand size, so it must be
     * recalculated here.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2008
2009
2010/**
2011 * @opcode 0x49
2012 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * Opcode 0x49: 'dec eCX' in 16/32-bit mode, but the REX.WB prefix in
     * 64-bit mode (W and B bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B extends ModR/M.rm / opcode reg */
        iemRecalEffOpSize(pVCpu);    /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2032
2033
2034/**
2035 * @opcode 0x4a
2036 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * Opcode 0x4a: 'dec eDX' in 16/32-bit mode, but the REX.WX prefix in
     * 64-bit mode (W and X bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index */
        iemRecalEffOpSize(pVCpu);        /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2056
2057
2058/**
2059 * @opcode 0x4b
2060 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * Opcode 0x4b: 'dec eBX' in 16/32-bit mode, but the REX.WXB prefix in
     * 64-bit mode (W, X and B bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends ModR/M.rm / opcode reg */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index */
        iemRecalEffOpSize(pVCpu);        /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2081
2082
2083/**
2084 * @opcode 0x4c
2085 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * Opcode 0x4c: 'dec eSP' in 16/32-bit mode, but the REX.WR prefix in
     * 64-bit mode (W and R bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R extends ModR/M.reg */
        iemRecalEffOpSize(pVCpu);      /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2105
2106
2107/**
2108 * @opcode 0x4d
2109 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * Opcode 0x4d: 'dec eBP' in 16/32-bit mode, but the REX.WRB prefix in
     * 64-bit mode (W, R and B bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R extends ModR/M.reg */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B extends ModR/M.rm / opcode reg */
        iemRecalEffOpSize(pVCpu);      /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2130
2131
2132/**
2133 * @opcode 0x4e
2134 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * Opcode 0x4e: 'dec eSI' in 16/32-bit mode, but the REX.WRX prefix in
     * 64-bit mode (W, R and X bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends ModR/M.reg */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index */
        iemRecalEffOpSize(pVCpu);        /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2155
2156
2157/**
2158 * @opcode 0x4f
2159 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * Opcode 0x4f: 'dec eDI' in 16/32-bit mode, but the REX.WRXB prefix in
     * 64-bit mode (all four REX bits set).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends ModR/M.reg */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends ModR/M.rm / opcode reg */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index */
        iemRecalEffOpSize(pVCpu);        /* REX.W promotes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2181
2182
2183/**
2184 * Common 'push register' helper.
2185 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    /*
     * Common worker for the 0x50..0x57 'push reg' opcodes.
     *
     * In 64-bit mode the register index is extended by REX.B, the default
     * operand size is 64-bit, and a 0x66 prefix selects 16-bit (there is no
     * 32-bit push in long mode).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it at the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2230
2231
2232/**
2233 * @opcode 0x50
2234 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* All size/mode handling is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2240
2241
2242/**
2243 * @opcode 0x51
2244 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* All size/mode handling is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2250
2251
2252/**
2253 * @opcode 0x52
2254 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* All size/mode handling is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2260
2261
2262/**
2263 * @opcode 0x53
2264 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* All size/mode handling is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2270
2271
2272/**
2273 * @opcode 0x54
2274 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /*
     * The 8086/8088 decrements SP before storing it, so 'push sp' writes
     * SP-2 rather than the original SP value; emulate that quirk here.
     * (The IEM_MC_END above completes the instruction, so the common path
     * below is only taken for non-8086 targets.)
     */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2291
2292
2293/**
2294 * @opcode 0x55
2295 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* All size/mode handling is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2301
2302
2303/**
2304 * @opcode 0x56
2305 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* All size/mode handling is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2311
2312
2313/**
2314 * @opcode 0x57
2315 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* All size/mode handling is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2321
2322
2323/**
2324 * Common 'pop register' helper.
2325 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    /*
     * Common worker for the 0x58..0x5f 'pop reg' opcodes.
     *
     * In 64-bit mode the register index is extended by REX.B, the default
     * operand size is 64-bit, and a 0x66 prefix selects 16-bit (there is no
     * 32-bit pop in long mode).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop directly into a reference to the destination register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2371
2372
2373/**
2374 * @opcode 0x58
2375 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* All size/mode handling is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2381
2382
2383/**
2384 * @opcode 0x59
2385 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* All size/mode handling is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2391
2392
2393/**
2394 * @opcode 0x5a
2395 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* All size/mode handling is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2401
2402
2403/**
2404 * @opcode 0x5b
2405 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* All size/mode handling is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2411
2412
2413/**
2414 * @opcode 0x5c
2415 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /*
     * 'pop rSP' is special: the value is read from the old stack top and
     * then replaces rSP, so the normal pop-into-register-reference worker
     * cannot be used (it would also apply the post-pop rSP increment).
     * With REX.B in 64-bit mode the destination is r12 instead, which is an
     * ordinary register and can go through the common worker.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2466
2467
2468/**
2469 * @opcode 0x5d
2470 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* All size/mode handling is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2476
2477
2478/**
2479 * @opcode 0x5e
2480 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* All size/mode handling is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2486
2487
2488/**
2489 * @opcode 0x5f
2490 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* All size/mode handling is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2496
2497
2498/**
2499 * @opcode 0x60
2500 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();  /* PUSHA was introduced with the 80186. */
    IEMOP_HLP_NO_64BIT(); /* Opcode 0x60 is invalid (reused) in 64-bit mode. */
    /* Defer to the C implementation matching the effective operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2511
2512
2513/**
2514 * @opcode 0x61
2515 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /*
     * Opcode 0x61: POPA outside 64-bit mode; in 64-bit mode it is the MVEX
     * prefix (Knights Corner), which this implementation does not support.
     */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();  /* POPA was introduced with the 80186. */
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2532
2533
2534/**
2535 * @opcode 0x62
2536 * @opmnemonic bound
2537 * @op1 Gv_RO
2538 * @op2 Ma
2539 * @opmincpu 80186
2540 * @ophints harmless x86_invalid_64
2541 * @optest op1=0 op2=0 ->
2542 * @optest op1=1 op2=0 -> value.xcpt=5
2543 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2544 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2545 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2546 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2547 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2548 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2549 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2550 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2551 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2555 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2564 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2565 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2567 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2568 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2569 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2570 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2571 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2572 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2573 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2577 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2584 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2585 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186(); /* BOUND was introduced with the 80186. */
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit operands: bounds are two consecutive words at the
                   effective address; the range check raises \#BR on failure. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                /* Same as above, but the bounds are two consecutive dwords. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix path: consume the remaining two payload bytes, then bail
       out since EVEX decoding is not implemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2673
2674
2675/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* Opcode 0x63 outside 64-bit mode: ARPL adjusts the RPL field of the
       destination selector; requires protected mode (no real/V86). */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write on the destination word. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2723
2724
2725/**
2726 * @opcode 0x63
2727 *
2728 * @note This is a weird one. It works like a regular move instruction if
2729 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2730 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend dword to qword) is handled here;
       the non-REX.W variants mentioned in the function docs are TODO. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2772
2773
2774/**
2775 * @opcode 0x64
2776 * @opmnemonic segfs
2777 * @opmincpu 80386
2778 * @opgroup og_prefixes
2779 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it and restart decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2791
2792
2793/**
2794 * @opcode 0x65
2795 * @opmnemonic seggs
2796 * @opmincpu 80386
2797 * @opgroup og_prefixes
2798 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it and restart decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2810
2811
2812/**
2813 * @opcode 0x66
2814 * @opmnemonic opsize
2815 * @openc prefix
2816 * @opmincpu 80386
2817 * @ophints harmless
2818 * @opgroup og_prefixes
2819 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (0x66): toggle the effective operand
       size and restart decoding with the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2836
2837
2838/**
2839 * @opcode 0x67
2840 * @opmnemonic addrsize
2841 * @openc prefix
2842 * @opmincpu 80386
2843 * @ophints harmless
2844 * @opgroup og_prefixes
2845 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (0x67): toggle the effective address
       mode relative to the default (64-bit mode drops to 32-bit, never
       16-bit) and restart decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2863
2864
2865/**
2866 * @opcode 0x68
2867 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* Push immediate (word/dword, or dword sign-extended to qword in
       64-bit mode where the default operand size is 64-bit). */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2905
2906
2907/**
2908 * @opcode 0x69
2909 */
2910FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2911{
2912 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2913 IEMOP_HLP_MIN_186();
2914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2916
2917 switch (pVCpu->iem.s.enmEffOpSize)
2918 {
2919 case IEMMODE_16BIT:
2920 {
2921 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2922 if (IEM_IS_MODRM_REG_MODE(bRm))
2923 {
2924 /* register operand */
2925 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2926 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2928 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2929 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2930 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2931 IEM_MC_LOCAL(uint16_t, u16Tmp);
2932
2933 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2934 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2935 IEM_MC_REF_EFLAGS(pEFlags);
2936 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2937 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2938
2939 IEM_MC_ADVANCE_RIP_AND_FINISH();
2940 IEM_MC_END();
2941 }
2942 else
2943 {
2944 /* memory operand */
2945 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2946 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2947 IEM_MC_ARG(uint16_t, u16Src, 1);
2948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2949 IEM_MC_LOCAL(uint16_t, u16Tmp);
2950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2951
2952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2953 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2954 IEM_MC_ASSIGN(u16Src, u16Imm);
2955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2956 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2957 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2958 IEM_MC_REF_EFLAGS(pEFlags);
2959 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2960 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2961
2962 IEM_MC_ADVANCE_RIP_AND_FINISH();
2963 IEM_MC_END();
2964 }
2965 break;
2966 }
2967
2968 case IEMMODE_32BIT:
2969 {
2970 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2971 if (IEM_IS_MODRM_REG_MODE(bRm))
2972 {
2973 /* register operand */
2974 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2975 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2977 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2978 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2979 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2980 IEM_MC_LOCAL(uint32_t, u32Tmp);
2981
2982 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2983 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2984 IEM_MC_REF_EFLAGS(pEFlags);
2985 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2986 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2987
2988 IEM_MC_ADVANCE_RIP_AND_FINISH();
2989 IEM_MC_END();
2990 }
2991 else
2992 {
2993 /* memory operand */
2994 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2995 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2996 IEM_MC_ARG(uint32_t, u32Src, 1);
2997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2998 IEM_MC_LOCAL(uint32_t, u32Tmp);
2999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3000
3001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3002 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3003 IEM_MC_ASSIGN(u32Src, u32Imm);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3006 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3007 IEM_MC_REF_EFLAGS(pEFlags);
3008 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3009 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3010
3011 IEM_MC_ADVANCE_RIP_AND_FINISH();
3012 IEM_MC_END();
3013 }
3014 break;
3015 }
3016
3017 case IEMMODE_64BIT:
3018 {
3019 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3020 if (IEM_IS_MODRM_REG_MODE(bRm))
3021 {
3022 /* register operand */
3023 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3024 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3027 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
3028 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3029 IEM_MC_LOCAL(uint64_t, u64Tmp);
3030
3031 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3032 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3033 IEM_MC_REF_EFLAGS(pEFlags);
3034 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3035 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3036
3037 IEM_MC_ADVANCE_RIP_AND_FINISH();
3038 IEM_MC_END();
3039 }
3040 else
3041 {
3042 /* memory operand */
3043 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3044 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3045 IEM_MC_ARG(uint64_t, u64Src, 1);
3046 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3047 IEM_MC_LOCAL(uint64_t, u64Tmp);
3048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3049
3050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3051 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3052 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3054 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3055 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3056 IEM_MC_REF_EFLAGS(pEFlags);
3057 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3058 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3059
3060 IEM_MC_ADVANCE_RIP_AND_FINISH();
3061 IEM_MC_END();
3062 }
3063 break;
3064 }
3065
3066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3067 }
3068}
3069
3070
3071/**
3072 * @opcode 0x6a
3073 */
3074FNIEMOP_DEF(iemOp_push_Ib)
3075{
3076 IEMOP_MNEMONIC(push_Ib, "push Ib");
3077 IEMOP_HLP_MIN_186();
3078 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3080
3081 switch (pVCpu->iem.s.enmEffOpSize)
3082 {
3083 case IEMMODE_16BIT:
3084 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3086 IEM_MC_PUSH_U16(i8Imm);
3087 IEM_MC_ADVANCE_RIP_AND_FINISH();
3088 IEM_MC_END();
3089 break;
3090 case IEMMODE_32BIT:
3091 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3093 IEM_MC_PUSH_U32(i8Imm);
3094 IEM_MC_ADVANCE_RIP_AND_FINISH();
3095 IEM_MC_END();
3096 break;
3097 case IEMMODE_64BIT:
3098 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
3099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3100 IEM_MC_PUSH_U64(i8Imm);
3101 IEM_MC_ADVANCE_RIP_AND_FINISH();
3102 IEM_MC_END();
3103 break;
3104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3105 }
3106}
3107
3108
3109/**
3110 * @opcode 0x6b
3111 */
3112FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3113{
3114 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3115 IEMOP_HLP_MIN_186();
3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3117 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3118
3119 switch (pVCpu->iem.s.enmEffOpSize)
3120 {
3121 case IEMMODE_16BIT:
3122 {
3123 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3124 if (IEM_IS_MODRM_REG_MODE(bRm))
3125 {
3126 /* register operand */
3127 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3128 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3131 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3133 IEM_MC_LOCAL(uint16_t, u16Tmp);
3134
3135 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3136 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3137 IEM_MC_REF_EFLAGS(pEFlags);
3138 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3139 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /* memory operand */
3147 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3148 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3149 IEM_MC_ARG(uint16_t, u16Src, 1);
3150 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3151 IEM_MC_LOCAL(uint16_t, u16Tmp);
3152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3153
3154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3155 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3156 IEM_MC_ASSIGN(u16Src, u16Imm);
3157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3158 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3159 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3160 IEM_MC_REF_EFLAGS(pEFlags);
3161 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3162 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3163
3164 IEM_MC_ADVANCE_RIP_AND_FINISH();
3165 IEM_MC_END();
3166 }
3167 break;
3168 }
3169
3170 case IEMMODE_32BIT:
3171 {
3172 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3173 if (IEM_IS_MODRM_REG_MODE(bRm))
3174 {
3175 /* register operand */
3176 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3177 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3179 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3180 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3182 IEM_MC_LOCAL(uint32_t, u32Tmp);
3183
3184 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3185 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3186 IEM_MC_REF_EFLAGS(pEFlags);
3187 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3188 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3189
3190 IEM_MC_ADVANCE_RIP_AND_FINISH();
3191 IEM_MC_END();
3192 }
3193 else
3194 {
3195 /* memory operand */
3196 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3198 IEM_MC_ARG(uint32_t, u32Src, 1);
3199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3200 IEM_MC_LOCAL(uint32_t, u32Tmp);
3201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3202
3203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3204 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3205 IEM_MC_ASSIGN(u32Src, u32Imm);
3206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3207 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3208 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3209 IEM_MC_REF_EFLAGS(pEFlags);
3210 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3211 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3212
3213 IEM_MC_ADVANCE_RIP_AND_FINISH();
3214 IEM_MC_END();
3215 }
3216 break;
3217 }
3218
3219 case IEMMODE_64BIT:
3220 {
3221 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3222 if (IEM_IS_MODRM_REG_MODE(bRm))
3223 {
3224 /* register operand */
3225 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3226 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3229 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3230 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3231 IEM_MC_LOCAL(uint64_t, u64Tmp);
3232
3233 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3234 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3235 IEM_MC_REF_EFLAGS(pEFlags);
3236 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3237 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3238
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242 else
3243 {
3244 /* memory operand */
3245 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3246 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3247 IEM_MC_ARG(uint64_t, u64Src, 1);
3248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3249 IEM_MC_LOCAL(uint64_t, u64Tmp);
3250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3251
3252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3253 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3254 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3257 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3258 IEM_MC_REF_EFLAGS(pEFlags);
3259 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3260 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3261
3262 IEM_MC_ADVANCE_RIP_AND_FINISH();
3263 IEM_MC_END();
3264 }
3265 break;
3266 }
3267
3268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3269 }
3270}
3271
3272
3273/**
3274 * @opcode 0x6c
3275 */
3276FNIEMOP_DEF(iemOp_insb_Yb_DX)
3277{
3278 IEMOP_HLP_MIN_186();
3279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3280 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3281 {
3282 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3283 switch (pVCpu->iem.s.enmEffAddrMode)
3284 {
3285 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3286 iemCImpl_rep_ins_op8_addr16, false);
3287 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3288 iemCImpl_rep_ins_op8_addr32, false);
3289 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3290 iemCImpl_rep_ins_op8_addr64, false);
3291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3292 }
3293 }
3294 else
3295 {
3296 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3297 switch (pVCpu->iem.s.enmEffAddrMode)
3298 {
3299 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3300 iemCImpl_ins_op8_addr16, false);
3301 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3302 iemCImpl_ins_op8_addr32, false);
3303 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3304 iemCImpl_ins_op8_addr64, false);
3305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3306 }
3307 }
3308}
3309
3310
3311/**
3312 * @opcode 0x6d
3313 */
3314FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3315{
3316 IEMOP_HLP_MIN_186();
3317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3318 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3319 {
3320 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3321 switch (pVCpu->iem.s.enmEffOpSize)
3322 {
3323 case IEMMODE_16BIT:
3324 switch (pVCpu->iem.s.enmEffAddrMode)
3325 {
3326 case IEMMODE_16BIT:
3327 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3328 iemCImpl_rep_ins_op16_addr16, false);
3329 case IEMMODE_32BIT:
3330 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3331 iemCImpl_rep_ins_op16_addr32, false);
3332 case IEMMODE_64BIT:
3333 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3334 iemCImpl_rep_ins_op16_addr64, false);
3335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3336 }
3337 break;
3338 case IEMMODE_64BIT:
3339 case IEMMODE_32BIT:
3340 switch (pVCpu->iem.s.enmEffAddrMode)
3341 {
3342 case IEMMODE_16BIT:
3343 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3344 iemCImpl_rep_ins_op32_addr16, false);
3345 case IEMMODE_32BIT:
3346 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3347 iemCImpl_rep_ins_op32_addr32, false);
3348 case IEMMODE_64BIT:
3349 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3350 iemCImpl_rep_ins_op32_addr64, false);
3351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3352 }
3353 break;
3354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3355 }
3356 }
3357 else
3358 {
3359 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3360 switch (pVCpu->iem.s.enmEffOpSize)
3361 {
3362 case IEMMODE_16BIT:
3363 switch (pVCpu->iem.s.enmEffAddrMode)
3364 {
3365 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3366 iemCImpl_ins_op16_addr16, false);
3367 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3368 iemCImpl_ins_op16_addr32, false);
3369 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3370 iemCImpl_ins_op16_addr64, false);
3371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3372 }
3373 break;
3374 case IEMMODE_64BIT:
3375 case IEMMODE_32BIT:
3376 switch (pVCpu->iem.s.enmEffAddrMode)
3377 {
3378 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3379 iemCImpl_ins_op32_addr16, false);
3380 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3381 iemCImpl_ins_op32_addr32, false);
3382 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3383 iemCImpl_ins_op32_addr64, false);
3384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3385 }
3386 break;
3387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3388 }
3389 }
3390}
3391
3392
3393/**
3394 * @opcode 0x6e
3395 */
3396FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3397{
3398 IEMOP_HLP_MIN_186();
3399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3400 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3401 {
3402 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3403 switch (pVCpu->iem.s.enmEffAddrMode)
3404 {
3405 case IEMMODE_16BIT:
3406 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3407 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3408 case IEMMODE_32BIT:
3409 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3410 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3411 case IEMMODE_64BIT:
3412 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3413 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3415 }
3416 }
3417 else
3418 {
3419 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3420 switch (pVCpu->iem.s.enmEffAddrMode)
3421 {
3422 case IEMMODE_16BIT:
3423 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3424 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3425 case IEMMODE_32BIT:
3426 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3427 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3428 case IEMMODE_64BIT:
3429 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3430 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3432 }
3433 }
3434}
3435
3436
3437/**
3438 * @opcode 0x6f
3439 */
3440FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3441{
3442 IEMOP_HLP_MIN_186();
3443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3444 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3445 {
3446 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3447 switch (pVCpu->iem.s.enmEffOpSize)
3448 {
3449 case IEMMODE_16BIT:
3450 switch (pVCpu->iem.s.enmEffAddrMode)
3451 {
3452 case IEMMODE_16BIT:
3453 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3454 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3455 case IEMMODE_32BIT:
3456 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3457 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3458 case IEMMODE_64BIT:
3459 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3460 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3462 }
3463 break;
3464 case IEMMODE_64BIT:
3465 case IEMMODE_32BIT:
3466 switch (pVCpu->iem.s.enmEffAddrMode)
3467 {
3468 case IEMMODE_16BIT:
3469 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3470 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3471 case IEMMODE_32BIT:
3472 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3473 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3474 case IEMMODE_64BIT:
3475 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3476 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3478 }
3479 break;
3480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3481 }
3482 }
3483 else
3484 {
3485 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3486 switch (pVCpu->iem.s.enmEffOpSize)
3487 {
3488 case IEMMODE_16BIT:
3489 switch (pVCpu->iem.s.enmEffAddrMode)
3490 {
3491 case IEMMODE_16BIT:
3492 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3493 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3494 case IEMMODE_32BIT:
3495 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3496 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3497 case IEMMODE_64BIT:
3498 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3499 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3501 }
3502 break;
3503 case IEMMODE_64BIT:
3504 case IEMMODE_32BIT:
3505 switch (pVCpu->iem.s.enmEffAddrMode)
3506 {
3507 case IEMMODE_16BIT:
3508 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3509 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3510 case IEMMODE_32BIT:
3511 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3512 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3513 case IEMMODE_64BIT:
3514 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3515 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3517 }
3518 break;
3519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3520 }
3521 }
3522}
3523
3524
3525/**
3526 * @opcode 0x70
3527 */
3528FNIEMOP_DEF(iemOp_jo_Jb)
3529{
3530 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3531 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3532 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3533
3534 IEM_MC_BEGIN(0, 0, 0, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3537 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3538 } IEM_MC_ELSE() {
3539 IEM_MC_ADVANCE_RIP_AND_FINISH();
3540 } IEM_MC_ENDIF();
3541 IEM_MC_END();
3542}
3543
3544
3545/**
3546 * @opcode 0x71
3547 */
3548FNIEMOP_DEF(iemOp_jno_Jb)
3549{
3550 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3551 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3552 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3553
3554 IEM_MC_BEGIN(0, 0, 0, 0);
3555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3556 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3557 IEM_MC_ADVANCE_RIP_AND_FINISH();
3558 } IEM_MC_ELSE() {
3559 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3560 } IEM_MC_ENDIF();
3561 IEM_MC_END();
3562}
3563
3564/**
3565 * @opcode 0x72
3566 */
3567FNIEMOP_DEF(iemOp_jc_Jb)
3568{
3569 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3570 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3571 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3572
3573 IEM_MC_BEGIN(0, 0, 0, 0);
3574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3576 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3577 } IEM_MC_ELSE() {
3578 IEM_MC_ADVANCE_RIP_AND_FINISH();
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581}
3582
3583
3584/**
3585 * @opcode 0x73
3586 */
3587FNIEMOP_DEF(iemOp_jnc_Jb)
3588{
3589 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3590 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3592
3593 IEM_MC_BEGIN(0, 0, 0, 0);
3594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3596 IEM_MC_ADVANCE_RIP_AND_FINISH();
3597 } IEM_MC_ELSE() {
3598 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3599 } IEM_MC_ENDIF();
3600 IEM_MC_END();
3601}
3602
3603
3604/**
3605 * @opcode 0x74
3606 */
3607FNIEMOP_DEF(iemOp_je_Jb)
3608{
3609 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3610 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3611 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3612
3613 IEM_MC_BEGIN(0, 0, 0, 0);
3614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3616 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3617 } IEM_MC_ELSE() {
3618 IEM_MC_ADVANCE_RIP_AND_FINISH();
3619 } IEM_MC_ENDIF();
3620 IEM_MC_END();
3621}
3622
3623
3624/**
3625 * @opcode 0x75
3626 */
3627FNIEMOP_DEF(iemOp_jne_Jb)
3628{
3629 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3630 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3631 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0, 0, 0);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3636 IEM_MC_ADVANCE_RIP_AND_FINISH();
3637 } IEM_MC_ELSE() {
3638 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3639 } IEM_MC_ENDIF();
3640 IEM_MC_END();
3641}
3642
3643
3644/**
3645 * @opcode 0x76
3646 */
3647FNIEMOP_DEF(iemOp_jbe_Jb)
3648{
3649 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3650 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3652
3653 IEM_MC_BEGIN(0, 0, 0, 0);
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3656 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3657 } IEM_MC_ELSE() {
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 } IEM_MC_ENDIF();
3660 IEM_MC_END();
3661}
3662
3663
3664/**
3665 * @opcode 0x77
3666 */
3667FNIEMOP_DEF(iemOp_jnbe_Jb)
3668{
3669 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3670 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3672
3673 IEM_MC_BEGIN(0, 0, 0, 0);
3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3675 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3676 IEM_MC_ADVANCE_RIP_AND_FINISH();
3677 } IEM_MC_ELSE() {
3678 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3679 } IEM_MC_ENDIF();
3680 IEM_MC_END();
3681}
3682
3683
3684/**
3685 * @opcode 0x78
3686 */
3687FNIEMOP_DEF(iemOp_js_Jb)
3688{
3689 IEMOP_MNEMONIC(js_Jb, "js Jb");
3690 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3691 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3692
3693 IEM_MC_BEGIN(0, 0, 0, 0);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3696 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3697 } IEM_MC_ELSE() {
3698 IEM_MC_ADVANCE_RIP_AND_FINISH();
3699 } IEM_MC_ENDIF();
3700 IEM_MC_END();
3701}
3702
3703
3704/**
3705 * @opcode 0x79
3706 */
3707FNIEMOP_DEF(iemOp_jns_Jb)
3708{
3709 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3710 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3711 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3712
3713 IEM_MC_BEGIN(0, 0, 0, 0);
3714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3715 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3716 IEM_MC_ADVANCE_RIP_AND_FINISH();
3717 } IEM_MC_ELSE() {
3718 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3719 } IEM_MC_ENDIF();
3720 IEM_MC_END();
3721}
3722
3723
3724/**
3725 * @opcode 0x7a
3726 */
3727FNIEMOP_DEF(iemOp_jp_Jb)
3728{
3729 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3730 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3731 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3732
3733 IEM_MC_BEGIN(0, 0, 0, 0);
3734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3736 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3737 } IEM_MC_ELSE() {
3738 IEM_MC_ADVANCE_RIP_AND_FINISH();
3739 } IEM_MC_ENDIF();
3740 IEM_MC_END();
3741}
3742
3743
3744/**
3745 * @opcode 0x7b
3746 */
3747FNIEMOP_DEF(iemOp_jnp_Jb)
3748{
3749 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3750 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3751 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3752
3753 IEM_MC_BEGIN(0, 0, 0, 0);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3756 IEM_MC_ADVANCE_RIP_AND_FINISH();
3757 } IEM_MC_ELSE() {
3758 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761}
3762
3763
3764/**
3765 * @opcode 0x7c
3766 */
3767FNIEMOP_DEF(iemOp_jl_Jb)
3768{
3769 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3770 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3772
3773 IEM_MC_BEGIN(0, 0, 0, 0);
3774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3775 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3776 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3777 } IEM_MC_ELSE() {
3778 IEM_MC_ADVANCE_RIP_AND_FINISH();
3779 } IEM_MC_ENDIF();
3780 IEM_MC_END();
3781}
3782
3783
3784/**
3785 * @opcode 0x7d
3786 */
3787FNIEMOP_DEF(iemOp_jnl_Jb)
3788{
3789 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3790 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3791 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3792
3793 IEM_MC_BEGIN(0, 0, 0, 0);
3794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3795 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3796 IEM_MC_ADVANCE_RIP_AND_FINISH();
3797 } IEM_MC_ELSE() {
3798 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3799 } IEM_MC_ENDIF();
3800 IEM_MC_END();
3801}
3802
3803
3804/**
3805 * @opcode 0x7e
3806 */
3807FNIEMOP_DEF(iemOp_jle_Jb)
3808{
3809 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3810 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3811 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0, 0, 0);
3814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3815 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3816 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3817 } IEM_MC_ELSE() {
3818 IEM_MC_ADVANCE_RIP_AND_FINISH();
3819 } IEM_MC_ENDIF();
3820 IEM_MC_END();
3821}
3822
3823
3824/**
3825 * @opcode 0x7f
3826 */
3827FNIEMOP_DEF(iemOp_jnle_Jb)
3828{
3829 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3830 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3831 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3832
3833 IEM_MC_BEGIN(0, 0, 0, 0);
3834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3835 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3836 IEM_MC_ADVANCE_RIP_AND_FINISH();
3837 } IEM_MC_ELSE() {
3838 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3839 } IEM_MC_ENDIF();
3840 IEM_MC_END();
3841}
3842
3843
3844/**
3845 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3846 * iemOp_Grp1_Eb_Ib_80.
3847 */
3848#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3849 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3850 { \
3851 /* register target */ \
3852 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3853 IEM_MC_BEGIN(3, 0, 0, 0); \
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3855 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3856 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3857 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3858 \
3859 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3860 IEM_MC_REF_EFLAGS(pEFlags); \
3861 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3862 \
3863 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3864 IEM_MC_END(); \
3865 } \
3866 else \
3867 { \
3868 /* memory target */ \
3869 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3870 { \
3871 IEM_MC_BEGIN(3, 3, 0, 0); \
3872 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3873 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3875 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3876 \
3877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3878 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3879 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3880 IEMOP_HLP_DONE_DECODING(); \
3881 \
3882 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3883 IEM_MC_FETCH_EFLAGS(EFlags); \
3884 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3885 \
3886 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3887 IEM_MC_COMMIT_EFLAGS(EFlags); \
3888 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3889 IEM_MC_END(); \
3890 } \
3891 else \
3892 { \
3893 (void)0
3894
3895#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
3896 IEM_MC_BEGIN(3, 3, 0, 0); \
3897 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3898 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3900 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3901 \
3902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3903 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3904 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3905 IEMOP_HLP_DONE_DECODING(); \
3906 \
3907 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3908 IEM_MC_FETCH_EFLAGS(EFlags); \
3909 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
3910 \
3911 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3912 IEM_MC_COMMIT_EFLAGS(EFlags); \
3913 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3914 IEM_MC_END(); \
3915 } \
3916 } \
3917 (void)0
3918
/*
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that never
 * write the destination (CMP): the memory operand is mapped read-only and
 * only EFLAGS is committed.  Like _RW, it ends inside an open "else {" for
 * the LOCK-prefixed case, which IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK below must
 * close.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/*
 * Completes the open LOCK-prefix branch left by IEMOP_BODY_BINARY_Eb_Ib_RO by
 * raising #UD - LOCK is invalid for read-only (CMP-style) operations.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3972
3973
3974
3975/**
3976 * @opmaps grp1_80,grp1_83
3977 * @opcode /0
3978 */
3979FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3980{
3981 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3982 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
3983 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3984}
3985
3986
3987/**
3988 * @opmaps grp1_80,grp1_83
3989 * @opcode /1
3990 */
3991FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3992{
3993 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3994 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
3995 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
3996}
3997
3998
3999/**
4000 * @opmaps grp1_80,grp1_83
4001 * @opcode /2
4002 */
4003FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4004{
4005 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4006 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4007 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4008}
4009
4010
4011/**
4012 * @opmaps grp1_80,grp1_83
4013 * @opcode /3
4014 */
4015FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4016{
4017 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4018 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4019 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4020}
4021
4022
4023/**
4024 * @opmaps grp1_80,grp1_83
4025 * @opcode /4
4026 */
4027FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4028{
4029 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4030 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4031 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4032}
4033
4034
4035/**
4036 * @opmaps grp1_80,grp1_83
4037 * @opcode /5
4038 */
4039FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4040{
4041 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4042 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4043 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4044}
4045
4046
4047/**
4048 * @opmaps grp1_80,grp1_83
4049 * @opcode /6
4050 */
4051FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4052{
4053 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4054 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4055 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4056}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /7
4062 */
4063FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4064{
4065 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4066 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4067 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4068}
4069
4070
4071/**
4072 * @opcode 0x80
4073 */
4074FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4075{
4076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4077 switch (IEM_GET_MODRM_REG_8(bRm))
4078 {
4079 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4080 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4081 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4082 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4083 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4084 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4085 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4086 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4088 }
4089}
4090
4091
4092/**
4093 * Body for a group 1 binary operator.
4094 */
4095#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4096 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4097 { \
4098 /* register target */ \
4099 switch (pVCpu->iem.s.enmEffOpSize) \
4100 { \
4101 case IEMMODE_16BIT: \
4102 { \
4103 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4104 IEM_MC_BEGIN(3, 0, 0, 0); \
4105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4106 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4107 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4108 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4109 \
4110 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4111 IEM_MC_REF_EFLAGS(pEFlags); \
4112 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4113 \
4114 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4115 IEM_MC_END(); \
4116 break; \
4117 } \
4118 \
4119 case IEMMODE_32BIT: \
4120 { \
4121 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4122 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4124 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4125 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4126 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4127 \
4128 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4129 IEM_MC_REF_EFLAGS(pEFlags); \
4130 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4131 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4132 \
4133 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4134 IEM_MC_END(); \
4135 break; \
4136 } \
4137 \
4138 case IEMMODE_64BIT: \
4139 { \
4140 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4141 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4143 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4144 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4145 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4146 \
4147 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4148 IEM_MC_REF_EFLAGS(pEFlags); \
4149 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4150 \
4151 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4152 IEM_MC_END(); \
4153 break; \
4154 } \
4155 \
4156 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4157 } \
4158 } \
4159 else \
4160 { \
4161 /* memory target */ \
4162 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4163 { \
4164 switch (pVCpu->iem.s.enmEffOpSize) \
4165 { \
4166 case IEMMODE_16BIT: \
4167 { \
4168 IEM_MC_BEGIN(3, 3, 0, 0); \
4169 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4170 IEM_MC_ARG(uint16_t, u16Src, 1); \
4171 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4173 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4174 \
4175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4176 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4177 IEM_MC_ASSIGN(u16Src, u16Imm); \
4178 IEMOP_HLP_DONE_DECODING(); \
4179 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4180 IEM_MC_FETCH_EFLAGS(EFlags); \
4181 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4182 \
4183 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4184 IEM_MC_COMMIT_EFLAGS(EFlags); \
4185 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4186 IEM_MC_END(); \
4187 break; \
4188 } \
4189 \
4190 case IEMMODE_32BIT: \
4191 { \
4192 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4193 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4194 IEM_MC_ARG(uint32_t, u32Src, 1); \
4195 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4197 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4198 \
4199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4200 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4201 IEM_MC_ASSIGN(u32Src, u32Imm); \
4202 IEMOP_HLP_DONE_DECODING(); \
4203 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4204 IEM_MC_FETCH_EFLAGS(EFlags); \
4205 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4206 \
4207 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4208 IEM_MC_COMMIT_EFLAGS(EFlags); \
4209 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4210 IEM_MC_END(); \
4211 break; \
4212 } \
4213 \
4214 case IEMMODE_64BIT: \
4215 { \
4216 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4217 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4218 IEM_MC_ARG(uint64_t, u64Src, 1); \
4219 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4221 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4222 \
4223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4224 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4225 IEMOP_HLP_DONE_DECODING(); \
4226 IEM_MC_ASSIGN(u64Src, u64Imm); \
4227 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4228 IEM_MC_FETCH_EFLAGS(EFlags); \
4229 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4230 \
4231 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4232 IEM_MC_COMMIT_EFLAGS(EFlags); \
4233 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4234 IEM_MC_END(); \
4235 break; \
4236 } \
4237 \
4238 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4239 } \
4240 } \
4241 else \
4242 { \
4243 (void)0
4244/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
4245#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4246 switch (pVCpu->iem.s.enmEffOpSize) \
4247 { \
4248 case IEMMODE_16BIT: \
4249 { \
4250 IEM_MC_BEGIN(3, 3, 0, 0); \
4251 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4252 IEM_MC_ARG(uint16_t, u16Src, 1); \
4253 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4255 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4256 \
4257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4258 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4259 IEM_MC_ASSIGN(u16Src, u16Imm); \
4260 IEMOP_HLP_DONE_DECODING(); \
4261 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4262 IEM_MC_FETCH_EFLAGS(EFlags); \
4263 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4264 \
4265 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4266 IEM_MC_COMMIT_EFLAGS(EFlags); \
4267 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4268 IEM_MC_END(); \
4269 break; \
4270 } \
4271 \
4272 case IEMMODE_32BIT: \
4273 { \
4274 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4275 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4276 IEM_MC_ARG(uint32_t, u32Src, 1); \
4277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4279 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4280 \
4281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4282 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4283 IEM_MC_ASSIGN(u32Src, u32Imm); \
4284 IEMOP_HLP_DONE_DECODING(); \
4285 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4286 IEM_MC_FETCH_EFLAGS(EFlags); \
4287 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4288 \
4289 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4290 IEM_MC_COMMIT_EFLAGS(EFlags); \
4291 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4292 IEM_MC_END(); \
4293 break; \
4294 } \
4295 \
4296 case IEMMODE_64BIT: \
4297 { \
4298 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4299 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4300 IEM_MC_ARG(uint64_t, u64Src, 1); \
4301 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4303 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4304 \
4305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4306 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4307 IEMOP_HLP_DONE_DECODING(); \
4308 IEM_MC_ASSIGN(u64Src, u64Imm); \
4309 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4310 IEM_MC_FETCH_EFLAGS(EFlags); \
4311 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4312 \
4313 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4314 IEM_MC_COMMIT_EFLAGS(EFlags); \
4315 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4316 IEM_MC_END(); \
4317 break; \
4318 } \
4319 \
4320 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4321 } \
4322 } \
4323 } \
4324 (void)0
4325
4326/* read-only version */
4327#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4328 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4329 { \
4330 /* register target */ \
4331 switch (pVCpu->iem.s.enmEffOpSize) \
4332 { \
4333 case IEMMODE_16BIT: \
4334 { \
4335 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4336 IEM_MC_BEGIN(3, 0, 0, 0); \
4337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4339 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4340 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4341 \
4342 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4343 IEM_MC_REF_EFLAGS(pEFlags); \
4344 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4345 \
4346 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4347 IEM_MC_END(); \
4348 break; \
4349 } \
4350 \
4351 case IEMMODE_32BIT: \
4352 { \
4353 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4354 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4356 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4357 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4358 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4359 \
4360 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4361 IEM_MC_REF_EFLAGS(pEFlags); \
4362 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4363 \
4364 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4365 IEM_MC_END(); \
4366 break; \
4367 } \
4368 \
4369 case IEMMODE_64BIT: \
4370 { \
4371 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4372 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4374 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4375 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4376 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4377 \
4378 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4379 IEM_MC_REF_EFLAGS(pEFlags); \
4380 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4381 \
4382 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4383 IEM_MC_END(); \
4384 break; \
4385 } \
4386 \
4387 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4388 } \
4389 } \
4390 else \
4391 { \
4392 /* memory target */ \
4393 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4394 { \
4395 switch (pVCpu->iem.s.enmEffOpSize) \
4396 { \
4397 case IEMMODE_16BIT: \
4398 { \
4399 IEM_MC_BEGIN(3, 3, 0, 0); \
4400 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4401 IEM_MC_ARG(uint16_t, u16Src, 1); \
4402 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4404 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4405 \
4406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4407 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4408 IEM_MC_ASSIGN(u16Src, u16Imm); \
4409 IEMOP_HLP_DONE_DECODING(); \
4410 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4411 IEM_MC_FETCH_EFLAGS(EFlags); \
4412 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4413 \
4414 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
4415 IEM_MC_COMMIT_EFLAGS(EFlags); \
4416 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4417 IEM_MC_END(); \
4418 break; \
4419 } \
4420 \
4421 case IEMMODE_32BIT: \
4422 { \
4423 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4424 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4425 IEM_MC_ARG(uint32_t, u32Src, 1); \
4426 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4428 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4429 \
4430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4431 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4432 IEM_MC_ASSIGN(u32Src, u32Imm); \
4433 IEMOP_HLP_DONE_DECODING(); \
4434 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4435 IEM_MC_FETCH_EFLAGS(EFlags); \
4436 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4437 \
4438 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
4439 IEM_MC_COMMIT_EFLAGS(EFlags); \
4440 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4441 IEM_MC_END(); \
4442 break; \
4443 } \
4444 \
4445 case IEMMODE_64BIT: \
4446 { \
4447 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4448 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
4449 IEM_MC_ARG(uint64_t, u64Src, 1); \
4450 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4452 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4453 \
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4455 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4456 IEMOP_HLP_DONE_DECODING(); \
4457 IEM_MC_ASSIGN(u64Src, u64Imm); \
4458 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4459 IEM_MC_FETCH_EFLAGS(EFlags); \
4460 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4461 \
4462 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
4463 IEM_MC_COMMIT_EFLAGS(EFlags); \
4464 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4465 IEM_MC_END(); \
4466 break; \
4467 } \
4468 \
4469 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4470 } \
4471 } \
4472 else \
4473 { \
4474 IEMOP_HLP_DONE_DECODING(); \
4475 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4476 } \
4477 } \
4478 (void)0
4479
4480
4481/**
4482 * @opmaps grp1_81
4483 * @opcode /0
4484 */
4485FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4486{
4487 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4488 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4489 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4490}
4491
4492
4493/**
4494 * @opmaps grp1_81
4495 * @opcode /1
4496 */
4497FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4498{
4499 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4500 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4501 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4502}
4503
4504
4505/**
4506 * @opmaps grp1_81
4507 * @opcode /2
4508 */
4509FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4510{
4511 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4512 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4513 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4514}
4515
4516
4517/**
4518 * @opmaps grp1_81
4519 * @opcode /3
4520 */
4521FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4522{
4523 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4524 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4525 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4526}
4527
4528
4529/**
4530 * @opmaps grp1_81
4531 * @opcode /4
4532 */
4533FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4534{
4535 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4536 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4537 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4538}
4539
4540
4541/**
4542 * @opmaps grp1_81
4543 * @opcode /5
4544 */
4545FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4546{
4547 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4548 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4549 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4550}
4551
4552
4553/**
4554 * @opmaps grp1_81
4555 * @opcode /6
4556 */
4557FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4558{
4559 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4560 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4561 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4562}
4563
4564
4565/**
4566 * @opmaps grp1_81
4567 * @opcode /7
4568 */
4569FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4570{
4571 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4572 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4573}
4574
4575
4576/**
4577 * @opcode 0x81
4578 */
4579FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4580{
4581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4582 switch (IEM_GET_MODRM_REG_8(bRm))
4583 {
4584 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4585 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4586 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4587 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4588 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4589 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4590 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4591 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4593 }
4594}
4595
4596
4597/**
4598 * @opcode 0x82
4599 * @opmnemonic grp1_82
4600 * @opgroup og_groups
4601 */
4602FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4603{
4604 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4605 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4606}
4607
4608
4609/**
4610 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4611 * iemOp_Grp1_Ev_Ib.
4612 */
4613#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4614 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4615 { \
4616 /* \
4617 * Register target \
4618 */ \
4619 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4620 switch (pVCpu->iem.s.enmEffOpSize) \
4621 { \
4622 case IEMMODE_16BIT: \
4623 IEM_MC_BEGIN(3, 0, 0, 0); \
4624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4625 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4626 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4627 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4628 \
4629 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4630 IEM_MC_REF_EFLAGS(pEFlags); \
4631 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4632 \
4633 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4634 IEM_MC_END(); \
4635 break; \
4636 \
4637 case IEMMODE_32BIT: \
4638 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4640 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4641 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4642 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4643 \
4644 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4645 IEM_MC_REF_EFLAGS(pEFlags); \
4646 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4647 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4648 \
4649 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4650 IEM_MC_END(); \
4651 break; \
4652 \
4653 case IEMMODE_64BIT: \
4654 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4656 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4657 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4658 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4659 \
4660 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4661 IEM_MC_REF_EFLAGS(pEFlags); \
4662 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4663 \
4664 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4665 IEM_MC_END(); \
4666 break; \
4667 \
4668 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4669 } \
4670 } \
4671 else \
4672 { \
4673 /* \
4674 * Memory target. \
4675 */ \
4676 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4677 { \
4678 switch (pVCpu->iem.s.enmEffOpSize) \
4679 { \
4680 case IEMMODE_16BIT: \
4681 IEM_MC_BEGIN(3, 3, 0, 0); \
4682 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4683 IEM_MC_ARG(uint16_t, u16Src, 1); \
4684 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4686 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4687 \
4688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4689 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4690 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4691 IEMOP_HLP_DONE_DECODING(); \
4692 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4693 IEM_MC_FETCH_EFLAGS(EFlags); \
4694 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4695 \
4696 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4697 IEM_MC_COMMIT_EFLAGS(EFlags); \
4698 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4699 IEM_MC_END(); \
4700 break; \
4701 \
4702 case IEMMODE_32BIT: \
4703 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4704 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4705 IEM_MC_ARG(uint32_t, u32Src, 1); \
4706 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4708 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4709 \
4710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4711 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4712 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4713 IEMOP_HLP_DONE_DECODING(); \
4714 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4715 IEM_MC_FETCH_EFLAGS(EFlags); \
4716 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4717 \
4718 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4719 IEM_MC_COMMIT_EFLAGS(EFlags); \
4720 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4721 IEM_MC_END(); \
4722 break; \
4723 \
4724 case IEMMODE_64BIT: \
4725 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4726 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4727 IEM_MC_ARG(uint64_t, u64Src, 1); \
4728 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4730 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4731 \
4732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4733 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4734 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4735 IEMOP_HLP_DONE_DECODING(); \
4736 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4737 IEM_MC_FETCH_EFLAGS(EFlags); \
4738 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4739 \
4740 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4741 IEM_MC_COMMIT_EFLAGS(EFlags); \
4742 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4743 IEM_MC_END(); \
4744 break; \
4745 \
4746 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4747 } \
4748 } \
4749 else \
4750 { \
4751 (void)0
4752/* Separate macro to work around parsing issue in IEMAllInstPython.py */
4753#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4754 switch (pVCpu->iem.s.enmEffOpSize) \
4755 { \
4756 case IEMMODE_16BIT: \
4757 IEM_MC_BEGIN(3, 3, 0, 0); \
4758 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4759 IEM_MC_ARG(uint16_t, u16Src, 1); \
4760 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4762 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4763 \
4764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4765 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4766 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4767 IEMOP_HLP_DONE_DECODING(); \
4768 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4769 IEM_MC_FETCH_EFLAGS(EFlags); \
4770 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4771 \
4772 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4773 IEM_MC_COMMIT_EFLAGS(EFlags); \
4774 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4775 IEM_MC_END(); \
4776 break; \
4777 \
4778 case IEMMODE_32BIT: \
4779 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4780 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4781 IEM_MC_ARG(uint32_t, u32Src, 1); \
4782 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4784 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4785 \
4786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4787 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4788 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4789 IEMOP_HLP_DONE_DECODING(); \
4790 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4791 IEM_MC_FETCH_EFLAGS(EFlags); \
4792 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4793 \
4794 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4795 IEM_MC_COMMIT_EFLAGS(EFlags); \
4796 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4797 IEM_MC_END(); \
4798 break; \
4799 \
4800 case IEMMODE_64BIT: \
4801 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4802 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4803 IEM_MC_ARG(uint64_t, u64Src, 1); \
4804 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4806 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4807 \
4808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4809 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4810 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4811 IEMOP_HLP_DONE_DECODING(); \
4812 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4813 IEM_MC_FETCH_EFLAGS(EFlags); \
4814 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4815 \
4816 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4817 IEM_MC_COMMIT_EFLAGS(EFlags); \
4818 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4819 IEM_MC_END(); \
4820 break; \
4821 \
4822 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4823 } \
4824 } \
4825 } \
4826 (void)0
4827
4828/* read-only variant */
4829#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4830 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4831 { \
4832 /* \
4833 * Register target \
4834 */ \
4835 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4836 switch (pVCpu->iem.s.enmEffOpSize) \
4837 { \
4838 case IEMMODE_16BIT: \
4839 IEM_MC_BEGIN(3, 0, 0, 0); \
4840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4841 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4842 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4843 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4844 \
4845 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4846 IEM_MC_REF_EFLAGS(pEFlags); \
4847 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4848 \
4849 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4850 IEM_MC_END(); \
4851 break; \
4852 \
4853 case IEMMODE_32BIT: \
4854 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4856 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4857 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4858 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4859 \
4860 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4861 IEM_MC_REF_EFLAGS(pEFlags); \
4862 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4863 \
4864 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4865 IEM_MC_END(); \
4866 break; \
4867 \
4868 case IEMMODE_64BIT: \
4869 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4871 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4872 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4873 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4874 \
4875 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4876 IEM_MC_REF_EFLAGS(pEFlags); \
4877 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4878 \
4879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4880 IEM_MC_END(); \
4881 break; \
4882 \
4883 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4884 } \
4885 } \
4886 else \
4887 { \
4888 /* \
4889 * Memory target. \
4890 */ \
4891 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4892 { \
4893 switch (pVCpu->iem.s.enmEffOpSize) \
4894 { \
4895 case IEMMODE_16BIT: \
4896 IEM_MC_BEGIN(3, 3, 0, 0); \
4897 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4898 IEM_MC_ARG(uint16_t, u16Src, 1); \
4899 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4901 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4902 \
4903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4904 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4905 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4906 IEMOP_HLP_DONE_DECODING(); \
4907 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4908 IEM_MC_FETCH_EFLAGS(EFlags); \
4909 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4910 \
4911 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
4912 IEM_MC_COMMIT_EFLAGS(EFlags); \
4913 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4914 IEM_MC_END(); \
4915 break; \
4916 \
4917 case IEMMODE_32BIT: \
4918 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4919 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4920 IEM_MC_ARG(uint32_t, u32Src, 1); \
4921 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4923 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4924 \
4925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4926 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4927 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4928 IEMOP_HLP_DONE_DECODING(); \
4929 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4930 IEM_MC_FETCH_EFLAGS(EFlags); \
4931 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4932 \
4933 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
4934 IEM_MC_COMMIT_EFLAGS(EFlags); \
4935 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4936 IEM_MC_END(); \
4937 break; \
4938 \
4939 case IEMMODE_64BIT: \
4940 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4941 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
4942 IEM_MC_ARG(uint64_t, u64Src, 1); \
4943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4945 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4946 \
4947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4948 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4949 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4950 IEMOP_HLP_DONE_DECODING(); \
4951 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4952 IEM_MC_FETCH_EFLAGS(EFlags); \
4953 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4954 \
4955 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
4956 IEM_MC_COMMIT_EFLAGS(EFlags); \
4957 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4958 IEM_MC_END(); \
4959 break; \
4960 \
4961 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4962 } \
4963 } \
4964 else \
4965 { \
4966 IEMOP_HLP_DONE_DECODING(); \
4967 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4968 } \
4969 } \
4970 (void)0
4971
4972/**
4973 * @opmaps grp1_83
4974 * @opcode /0
4975 */
4976FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4977{
4978 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4979 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4980 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4981}
4982
4983
4984/**
4985 * @opmaps grp1_83
4986 * @opcode /1
4987 */
4988FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
4989{
4990 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
4991 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4992 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4993}
4994
4995
4996/**
4997 * @opmaps grp1_83
4998 * @opcode /2
4999 */
5000FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5001{
5002 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5003 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5004 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5005}
5006
5007
5008/**
5009 * @opmaps grp1_83
5010 * @opcode /3
5011 */
5012FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5013{
5014 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5015 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5016 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5017}
5018
5019
5020/**
5021 * @opmaps grp1_83
5022 * @opcode /4
5023 */
5024FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5025{
5026 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5027 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5028 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5029}
5030
5031
5032/**
5033 * @opmaps grp1_83
5034 * @opcode /5
5035 */
5036FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5037{
5038 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5039 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5040 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5041}
5042
5043
5044/**
5045 * @opmaps grp1_83
5046 * @opcode /6
5047 */
5048FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5049{
5050 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5051 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5052 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5053}
5054
5055
5056/**
5057 * @opmaps grp1_83
5058 * @opcode /7
5059 */
5060FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5061{
5062 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5063 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5064}
5065
5066
5067/**
5068 * @opcode 0x83
5069 */
5070FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5071{
5072 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5073 to the 386 even if absent in the intel reference manuals and some
5074 3rd party opcode listings. */
5075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5076 switch (IEM_GET_MODRM_REG_8(bRm))
5077 {
5078 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5079 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5080 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5081 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5082 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5083 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5084 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5085 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5087 }
5088}
5089
5090
5091/**
5092 * @opcode 0x84
5093 */
5094FNIEMOP_DEF(iemOp_test_Eb_Gb)
5095{
5096 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5097 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5098 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5099 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
5100}
5101
5102
5103/**
5104 * @opcode 0x85
5105 */
5106FNIEMOP_DEF(iemOp_test_Ev_Gv)
5107{
5108 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5109 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5110 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5111}
5112
5113
5114/**
5115 * @opcode 0x86
5116 */
5117FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5118{
5119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5120 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5121
5122 /*
5123 * If rm is denoting a register, no more instruction bytes.
5124 */
5125 if (IEM_IS_MODRM_REG_MODE(bRm))
5126 {
5127 IEM_MC_BEGIN(0, 2, 0, 0);
5128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5129 IEM_MC_LOCAL(uint8_t, uTmp1);
5130 IEM_MC_LOCAL(uint8_t, uTmp2);
5131
5132 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5133 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5134 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5135 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5136
5137 IEM_MC_ADVANCE_RIP_AND_FINISH();
5138 IEM_MC_END();
5139 }
5140 else
5141 {
5142 /*
5143 * We're accessing memory.
5144 */
5145 IEM_MC_BEGIN(2, 4, 0, 0);
5146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5147 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5148 IEM_MC_LOCAL(uint8_t, uTmpReg);
5149 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
5150 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);
5151
5152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5154 IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5155 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5156 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5157 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
5158 else
5159 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
5160 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
5161 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5162
5163 IEM_MC_ADVANCE_RIP_AND_FINISH();
5164 IEM_MC_END();
5165 }
5166}
5167
5168
5169/**
5170 * @opcode 0x87
5171 */
5172FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5173{
5174 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5176
5177 /*
5178 * If rm is denoting a register, no more instruction bytes.
5179 */
5180 if (IEM_IS_MODRM_REG_MODE(bRm))
5181 {
5182 switch (pVCpu->iem.s.enmEffOpSize)
5183 {
5184 case IEMMODE_16BIT:
5185 IEM_MC_BEGIN(0, 2, 0, 0);
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187 IEM_MC_LOCAL(uint16_t, uTmp1);
5188 IEM_MC_LOCAL(uint16_t, uTmp2);
5189
5190 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5191 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5192 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5193 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5194
5195 IEM_MC_ADVANCE_RIP_AND_FINISH();
5196 IEM_MC_END();
5197 break;
5198
5199 case IEMMODE_32BIT:
5200 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5202 IEM_MC_LOCAL(uint32_t, uTmp1);
5203 IEM_MC_LOCAL(uint32_t, uTmp2);
5204
5205 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5206 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5207 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5208 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5209
5210 IEM_MC_ADVANCE_RIP_AND_FINISH();
5211 IEM_MC_END();
5212 break;
5213
5214 case IEMMODE_64BIT:
5215 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5217 IEM_MC_LOCAL(uint64_t, uTmp1);
5218 IEM_MC_LOCAL(uint64_t, uTmp2);
5219
5220 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5221 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5222 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5223 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5224
5225 IEM_MC_ADVANCE_RIP_AND_FINISH();
5226 IEM_MC_END();
5227 break;
5228
5229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5230 }
5231 }
5232 else
5233 {
5234 /*
5235 * We're accessing memory.
5236 */
5237 switch (pVCpu->iem.s.enmEffOpSize)
5238 {
5239 case IEMMODE_16BIT:
5240 IEM_MC_BEGIN(2, 4, 0, 0);
5241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5242 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5243 IEM_MC_LOCAL(uint16_t, uTmpReg);
5244 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
5245 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);
5246
5247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5249 IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5250 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5251 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5252 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
5253 else
5254 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
5255 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
5256 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5257
5258 IEM_MC_ADVANCE_RIP_AND_FINISH();
5259 IEM_MC_END();
5260 break;
5261
5262 case IEMMODE_32BIT:
5263 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
5264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5265 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5266 IEM_MC_LOCAL(uint32_t, uTmpReg);
5267 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
5268 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);
5269
5270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272 IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5273 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5274 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5275 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
5276 else
5277 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
5278 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
5279 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5280
5281 IEM_MC_ADVANCE_RIP_AND_FINISH();
5282 IEM_MC_END();
5283 break;
5284
5285 case IEMMODE_64BIT:
5286 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
5287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5288 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5289 IEM_MC_LOCAL(uint64_t, uTmpReg);
5290 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
5291 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);
5292
5293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5295 IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5296 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5297 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5298 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
5299 else
5300 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
5301 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
5302 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5303
5304 IEM_MC_ADVANCE_RIP_AND_FINISH();
5305 IEM_MC_END();
5306 break;
5307
5308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5309 }
5310 }
5311}
5312
5313
5314/**
5315 * @opcode 0x88
5316 */
5317FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5318{
5319 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5320
5321 uint8_t bRm;
5322 IEM_OPCODE_GET_NEXT_U8(&bRm);
5323
5324 /*
5325 * If rm is denoting a register, no more instruction bytes.
5326 */
5327 if (IEM_IS_MODRM_REG_MODE(bRm))
5328 {
5329 IEM_MC_BEGIN(0, 1, 0, 0);
5330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5331 IEM_MC_LOCAL(uint8_t, u8Value);
5332 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5333 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5334 IEM_MC_ADVANCE_RIP_AND_FINISH();
5335 IEM_MC_END();
5336 }
5337 else
5338 {
5339 /*
5340 * We're writing a register to memory.
5341 */
5342 IEM_MC_BEGIN(0, 2, 0, 0);
5343 IEM_MC_LOCAL(uint8_t, u8Value);
5344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5347 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5348 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5349 IEM_MC_ADVANCE_RIP_AND_FINISH();
5350 IEM_MC_END();
5351 }
5352}
5353
5354
5355/**
5356 * @opcode 0x89
5357 */
5358FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5359{
5360 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5361
5362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5363
5364 /*
5365 * If rm is denoting a register, no more instruction bytes.
5366 */
5367 if (IEM_IS_MODRM_REG_MODE(bRm))
5368 {
5369 switch (pVCpu->iem.s.enmEffOpSize)
5370 {
5371 case IEMMODE_16BIT:
5372 IEM_MC_BEGIN(0, 1, 0, 0);
5373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5374 IEM_MC_LOCAL(uint16_t, u16Value);
5375 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5376 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5377 IEM_MC_ADVANCE_RIP_AND_FINISH();
5378 IEM_MC_END();
5379 break;
5380
5381 case IEMMODE_32BIT:
5382 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5384 IEM_MC_LOCAL(uint32_t, u32Value);
5385 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5386 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5387 IEM_MC_ADVANCE_RIP_AND_FINISH();
5388 IEM_MC_END();
5389 break;
5390
5391 case IEMMODE_64BIT:
5392 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_LOCAL(uint64_t, u64Value);
5395 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5396 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5397 IEM_MC_ADVANCE_RIP_AND_FINISH();
5398 IEM_MC_END();
5399 break;
5400
5401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5402 }
5403 }
5404 else
5405 {
5406 /*
5407 * We're writing a register to memory.
5408 */
5409 switch (pVCpu->iem.s.enmEffOpSize)
5410 {
5411 case IEMMODE_16BIT:
5412 IEM_MC_BEGIN(0, 2, 0, 0);
5413 IEM_MC_LOCAL(uint16_t, u16Value);
5414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5417 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5418 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5419 IEM_MC_ADVANCE_RIP_AND_FINISH();
5420 IEM_MC_END();
5421 break;
5422
5423 case IEMMODE_32BIT:
5424 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5425 IEM_MC_LOCAL(uint32_t, u32Value);
5426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5429 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5430 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5431 IEM_MC_ADVANCE_RIP_AND_FINISH();
5432 IEM_MC_END();
5433 break;
5434
5435 case IEMMODE_64BIT:
5436 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5437 IEM_MC_LOCAL(uint64_t, u64Value);
5438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5441 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5442 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5443 IEM_MC_ADVANCE_RIP_AND_FINISH();
5444 IEM_MC_END();
5445 break;
5446
5447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5448 }
5449 }
5450}
5451
5452
5453/**
5454 * @opcode 0x8a
5455 */
5456FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5457{
5458 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5459
5460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5461
5462 /*
5463 * If rm is denoting a register, no more instruction bytes.
5464 */
5465 if (IEM_IS_MODRM_REG_MODE(bRm))
5466 {
5467 IEM_MC_BEGIN(0, 1, 0, 0);
5468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5469 IEM_MC_LOCAL(uint8_t, u8Value);
5470 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5471 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5472 IEM_MC_ADVANCE_RIP_AND_FINISH();
5473 IEM_MC_END();
5474 }
5475 else
5476 {
5477 /*
5478 * We're loading a register from memory.
5479 */
5480 IEM_MC_BEGIN(0, 2, 0, 0);
5481 IEM_MC_LOCAL(uint8_t, u8Value);
5482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5485 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5486 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5487 IEM_MC_ADVANCE_RIP_AND_FINISH();
5488 IEM_MC_END();
5489 }
5490}
5491
5492
5493/**
5494 * @opcode 0x8b
5495 */
5496FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5497{
5498 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5499
5500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5501
5502 /*
5503 * If rm is denoting a register, no more instruction bytes.
5504 */
5505 if (IEM_IS_MODRM_REG_MODE(bRm))
5506 {
5507 switch (pVCpu->iem.s.enmEffOpSize)
5508 {
5509 case IEMMODE_16BIT:
5510 IEM_MC_BEGIN(0, 1, 0, 0);
5511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5512 IEM_MC_LOCAL(uint16_t, u16Value);
5513 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5514 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5515 IEM_MC_ADVANCE_RIP_AND_FINISH();
5516 IEM_MC_END();
5517 break;
5518
5519 case IEMMODE_32BIT:
5520 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5522 IEM_MC_LOCAL(uint32_t, u32Value);
5523 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5524 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5525 IEM_MC_ADVANCE_RIP_AND_FINISH();
5526 IEM_MC_END();
5527 break;
5528
5529 case IEMMODE_64BIT:
5530 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5532 IEM_MC_LOCAL(uint64_t, u64Value);
5533 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5534 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5535 IEM_MC_ADVANCE_RIP_AND_FINISH();
5536 IEM_MC_END();
5537 break;
5538
5539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5540 }
5541 }
5542 else
5543 {
5544 /*
5545 * We're loading a register from memory.
5546 */
5547 switch (pVCpu->iem.s.enmEffOpSize)
5548 {
5549 case IEMMODE_16BIT:
5550 IEM_MC_BEGIN(0, 2, 0, 0);
5551 IEM_MC_LOCAL(uint16_t, u16Value);
5552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5556 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5557 IEM_MC_ADVANCE_RIP_AND_FINISH();
5558 IEM_MC_END();
5559 break;
5560
5561 case IEMMODE_32BIT:
5562 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5563 IEM_MC_LOCAL(uint32_t, u32Value);
5564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5567 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5568 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5569 IEM_MC_ADVANCE_RIP_AND_FINISH();
5570 IEM_MC_END();
5571 break;
5572
5573 case IEMMODE_64BIT:
5574 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5575 IEM_MC_LOCAL(uint64_t, u64Value);
5576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5579 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5580 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5581 IEM_MC_ADVANCE_RIP_AND_FINISH();
5582 IEM_MC_END();
5583 break;
5584
5585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5586 }
5587 }
5588}
5589
5590
5591/**
5592 * opcode 0x63
5593 * @todo Table fixme
5594 */
5595FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5596{
5597 if (!IEM_IS_64BIT_CODE(pVCpu))
5598 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5599 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5600 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5601 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5602}
5603
5604
5605/**
5606 * @opcode 0x8c
5607 */
5608FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5609{
5610 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5611
5612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5613
5614 /*
5615 * Check that the destination register exists. The REX.R prefix is ignored.
5616 */
5617 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5618 if (iSegReg > X86_SREG_GS)
5619 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5620
5621 /*
5622 * If rm is denoting a register, no more instruction bytes.
5623 * In that case, the operand size is respected and the upper bits are
5624 * cleared (starting with some pentium).
5625 */
5626 if (IEM_IS_MODRM_REG_MODE(bRm))
5627 {
5628 switch (pVCpu->iem.s.enmEffOpSize)
5629 {
5630 case IEMMODE_16BIT:
5631 IEM_MC_BEGIN(0, 1, 0, 0);
5632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5633 IEM_MC_LOCAL(uint16_t, u16Value);
5634 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5635 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5636 IEM_MC_ADVANCE_RIP_AND_FINISH();
5637 IEM_MC_END();
5638 break;
5639
5640 case IEMMODE_32BIT:
5641 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5643 IEM_MC_LOCAL(uint32_t, u32Value);
5644 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5645 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5646 IEM_MC_ADVANCE_RIP_AND_FINISH();
5647 IEM_MC_END();
5648 break;
5649
5650 case IEMMODE_64BIT:
5651 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5653 IEM_MC_LOCAL(uint64_t, u64Value);
5654 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5655 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5656 IEM_MC_ADVANCE_RIP_AND_FINISH();
5657 IEM_MC_END();
5658 break;
5659
5660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5661 }
5662 }
5663 else
5664 {
5665 /*
5666 * We're saving the register to memory. The access is word sized
5667 * regardless of operand size prefixes.
5668 */
5669#if 0 /* not necessary */
5670 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5671#endif
5672 IEM_MC_BEGIN(0, 2, 0, 0);
5673 IEM_MC_LOCAL(uint16_t, u16Value);
5674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5677 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5678 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5679 IEM_MC_ADVANCE_RIP_AND_FINISH();
5680 IEM_MC_END();
5681 }
5682}
5683
5684
5685
5686
5687/**
5688 * @opcode 0x8d
5689 */
5690FNIEMOP_DEF(iemOp_lea_Gv_M)
5691{
5692 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5694 if (IEM_IS_MODRM_REG_MODE(bRm))
5695 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5696
5697 switch (pVCpu->iem.s.enmEffOpSize)
5698 {
5699 case IEMMODE_16BIT:
5700 IEM_MC_BEGIN(0, 2, 0, 0);
5701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5702 IEM_MC_LOCAL(uint16_t, u16Cast);
5703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5705 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5706 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5707 IEM_MC_ADVANCE_RIP_AND_FINISH();
5708 IEM_MC_END();
5709 break;
5710
5711 case IEMMODE_32BIT:
5712 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5714 IEM_MC_LOCAL(uint32_t, u32Cast);
5715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5717 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5718 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5719 IEM_MC_ADVANCE_RIP_AND_FINISH();
5720 IEM_MC_END();
5721 break;
5722
5723 case IEMMODE_64BIT:
5724 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5728 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5729 IEM_MC_ADVANCE_RIP_AND_FINISH();
5730 IEM_MC_END();
5731 break;
5732
5733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5734 }
5735}
5736
5737
5738/**
5739 * @opcode 0x8e
5740 */
5741FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5742{
5743 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5744
5745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5746
5747 /*
5748 * The practical operand size is 16-bit.
5749 */
5750#if 0 /* not necessary */
5751 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5752#endif
5753
5754 /*
5755 * Check that the destination register exists and can be used with this
5756 * instruction. The REX.R prefix is ignored.
5757 */
5758 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5759 /** @todo r=bird: What does 8086 do here wrt CS? */
5760 if ( iSegReg == X86_SREG_CS
5761 || iSegReg > X86_SREG_GS)
5762 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5763
5764 /*
5765 * If rm is denoting a register, no more instruction bytes.
5766 */
5767 if (IEM_IS_MODRM_REG_MODE(bRm))
5768 {
5769 /** @todo Only set IEM_CIMPL_F_INHIBIT_SHADOW when it actually applies... */
5770 IEM_MC_BEGIN(2, 0, 0, 0);
5771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5772 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5773 IEM_MC_ARG(uint16_t, u16Value, 1);
5774 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5775 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5776 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_INHIBIT_SHADOW, iemCImpl_load_SReg, iSRegArg, u16Value);
5777 else
5778 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5779 IEM_MC_END();
5780 }
5781 else
5782 {
5783 /*
5784 * We're loading the register from memory. The access is word sized
5785 * regardless of operand size prefixes.
5786 */
5787 /** @todo Only set IEM_CIMPL_F_INHIBIT_SHADOW when it actually applies... */
5788 IEM_MC_BEGIN(2, 1, 0, 0);
5789 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5790 IEM_MC_ARG(uint16_t, u16Value, 1);
5791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5794 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5795 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5796 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_INHIBIT_SHADOW, iemCImpl_load_SReg, iSRegArg, u16Value);
5797 else
5798 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5799 IEM_MC_END();
5800 }
5801}
5802
5803
/** Opcode 0x8f /0.
 *
 * POP Ev: pops a 16/32/64-bit value off the stack into a register or
 * memory destination.  The memory form is tricky because Intel specifies
 * that (R/E)SP is incremented *before* the effective address is
 * calculated; see the comments below. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* '2 << 8' puts the pop size into the RSP-displacement byte of the
               cbImmAndRspOffset parameter, so the EA is calculated as if RSP
               had already been incremented (see the note above). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5938
5939
5940/**
5941 * @opcode 0x8f
5942 */
5943FNIEMOP_DEF(iemOp_Grp1A__xop)
5944{
5945 /*
5946 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5947 * three byte VEX prefix, except that the mmmmm field cannot have the values
5948 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5949 */
5950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5951 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5952 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5953
5954 IEMOP_MNEMONIC(xop, "xop");
5955 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5956 {
5957 /** @todo Test when exctly the XOP conformance checks kick in during
5958 * instruction decoding and fetching (using \#PF). */
5959 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5960 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5961 if ( ( pVCpu->iem.s.fPrefixes
5962 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5963 == 0)
5964 {
5965 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5966 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5967 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5968 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5969 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5970 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5971 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5972 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5973 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5974
5975 /** @todo XOP: Just use new tables and decoders. */
5976 switch (bRm & 0x1f)
5977 {
5978 case 8: /* xop opcode map 8. */
5979 IEMOP_BITCH_ABOUT_STUB();
5980 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5981
5982 case 9: /* xop opcode map 9. */
5983 IEMOP_BITCH_ABOUT_STUB();
5984 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5985
5986 case 10: /* xop opcode map 10. */
5987 IEMOP_BITCH_ABOUT_STUB();
5988 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5989
5990 default:
5991 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5992 IEMOP_RAISE_INVALID_OPCODE_RET();
5993 }
5994 }
5995 else
5996 Log(("XOP: Invalid prefix mix!\n"));
5997 }
5998 else
5999 Log(("XOP: XOP support disabled!\n"));
6000 IEMOP_RAISE_INVALID_OPCODE_RET();
6001}
6002
6003
6004/**
6005 * Common 'xchg reg,rAX' helper.
6006 */
6007FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6008{
6009 iReg |= pVCpu->iem.s.uRexB;
6010 switch (pVCpu->iem.s.enmEffOpSize)
6011 {
6012 case IEMMODE_16BIT:
6013 IEM_MC_BEGIN(0, 2, 0, 0);
6014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6015 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6016 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6017 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6018 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6019 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6020 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6021 IEM_MC_ADVANCE_RIP_AND_FINISH();
6022 IEM_MC_END();
6023 break;
6024
6025 case IEMMODE_32BIT:
6026 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6029 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6030 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6031 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6032 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6033 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6034 IEM_MC_ADVANCE_RIP_AND_FINISH();
6035 IEM_MC_END();
6036 break;
6037
6038 case IEMMODE_64BIT:
6039 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6041 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6042 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6043 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6044 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6045 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6046 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6047 IEM_MC_ADVANCE_RIP_AND_FINISH();
6048 IEM_MC_END();
6049 break;
6050
6051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6052 }
6053}
6054
6055
6056/**
6057 * @opcode 0x90
6058 */
6059FNIEMOP_DEF(iemOp_nop)
6060{
6061 /* R8/R8D and RAX/EAX can be exchanged. */
6062 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6063 {
6064 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6065 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6066 }
6067
6068 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6069 {
6070 IEMOP_MNEMONIC(pause, "pause");
6071 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6072 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6073 if (!IEM_IS_IN_GUEST(pVCpu))
6074 { /* probable */ }
6075#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6076 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6077 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6078#endif
6079#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6080 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6081 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6082#endif
6083 }
6084 else
6085 IEMOP_MNEMONIC(nop, "nop");
6086 /** @todo testcase: lock nop; lock pause */
6087 IEM_MC_BEGIN(0, 0, 0, 0);
6088 IEMOP_HLP_DONE_DECODING();
6089 IEM_MC_ADVANCE_RIP_AND_FINISH();
6090 IEM_MC_END();
6091}
6092
6093
6094/**
6095 * @opcode 0x91
6096 */
6097FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6098{
6099 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6100 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6101}
6102
6103
6104/**
6105 * @opcode 0x92
6106 */
6107FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6108{
6109 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6110 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6111}
6112
6113
6114/**
6115 * @opcode 0x93
6116 */
6117FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6118{
6119 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6120 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6121}
6122
6123
6124/**
6125 * @opcode 0x94
6126 */
6127FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6128{
6129 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6130 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6131}
6132
6133
6134/**
6135 * @opcode 0x95
6136 */
6137FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6138{
6139 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6140 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6141}
6142
6143
6144/**
6145 * @opcode 0x96
6146 */
6147FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6148{
6149 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6150 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6151}
6152
6153
6154/**
6155 * @opcode 0x97
6156 */
6157FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6158{
6159 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6160 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6161}
6162
6163
6164/**
6165 * @opcode 0x98
6166 */
6167FNIEMOP_DEF(iemOp_cbw)
6168{
6169 switch (pVCpu->iem.s.enmEffOpSize)
6170 {
6171 case IEMMODE_16BIT:
6172 IEMOP_MNEMONIC(cbw, "cbw");
6173 IEM_MC_BEGIN(0, 1, 0, 0);
6174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6175 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6176 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6177 } IEM_MC_ELSE() {
6178 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6179 } IEM_MC_ENDIF();
6180 IEM_MC_ADVANCE_RIP_AND_FINISH();
6181 IEM_MC_END();
6182 break;
6183
6184 case IEMMODE_32BIT:
6185 IEMOP_MNEMONIC(cwde, "cwde");
6186 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6188 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6189 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6190 } IEM_MC_ELSE() {
6191 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6192 } IEM_MC_ENDIF();
6193 IEM_MC_ADVANCE_RIP_AND_FINISH();
6194 IEM_MC_END();
6195 break;
6196
6197 case IEMMODE_64BIT:
6198 IEMOP_MNEMONIC(cdqe, "cdqe");
6199 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6201 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6202 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6203 } IEM_MC_ELSE() {
6204 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6205 } IEM_MC_ENDIF();
6206 IEM_MC_ADVANCE_RIP_AND_FINISH();
6207 IEM_MC_END();
6208 break;
6209
6210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6211 }
6212}
6213
6214
6215/**
6216 * @opcode 0x99
6217 */
6218FNIEMOP_DEF(iemOp_cwd)
6219{
6220 switch (pVCpu->iem.s.enmEffOpSize)
6221 {
6222 case IEMMODE_16BIT:
6223 IEMOP_MNEMONIC(cwd, "cwd");
6224 IEM_MC_BEGIN(0, 1, 0, 0);
6225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6226 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6227 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6228 } IEM_MC_ELSE() {
6229 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6230 } IEM_MC_ENDIF();
6231 IEM_MC_ADVANCE_RIP_AND_FINISH();
6232 IEM_MC_END();
6233 break;
6234
6235 case IEMMODE_32BIT:
6236 IEMOP_MNEMONIC(cdq, "cdq");
6237 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6239 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6240 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6241 } IEM_MC_ELSE() {
6242 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6243 } IEM_MC_ENDIF();
6244 IEM_MC_ADVANCE_RIP_AND_FINISH();
6245 IEM_MC_END();
6246 break;
6247
6248 case IEMMODE_64BIT:
6249 IEMOP_MNEMONIC(cqo, "cqo");
6250 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6252 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6253 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6254 } IEM_MC_ELSE() {
6255 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6256 } IEM_MC_ENDIF();
6257 IEM_MC_ADVANCE_RIP_AND_FINISH();
6258 IEM_MC_END();
6259 break;
6260
6261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6262 }
6263}
6264
6265
6266/**
6267 * @opcode 0x9a
6268 */
6269FNIEMOP_DEF(iemOp_call_Ap)
6270{
6271 IEMOP_MNEMONIC(call_Ap, "call Ap");
6272 IEMOP_HLP_NO_64BIT();
6273
6274 /* Decode the far pointer address and pass it on to the far call C implementation. */
6275 uint32_t off32Seg;
6276 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6277 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6278 else
6279 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6280 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6282 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6283 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6284 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6285}
6286
6287
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6299
6300
6301/**
6302 * @opcode 0x9c
6303 */
6304FNIEMOP_DEF(iemOp_pushf_Fv)
6305{
6306 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6308 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6309 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6310}
6311
6312
6313/**
6314 * @opcode 0x9d
6315 */
6316FNIEMOP_DEF(iemOp_popf_Fv)
6317{
6318 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6320 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6321 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6322 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6323}
6324
6325
6326/**
6327 * @opcode 0x9e
6328 */
6329FNIEMOP_DEF(iemOp_sahf)
6330{
6331 IEMOP_MNEMONIC(sahf, "sahf");
6332 if ( IEM_IS_64BIT_CODE(pVCpu)
6333 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6334 IEMOP_RAISE_INVALID_OPCODE_RET();
6335 IEM_MC_BEGIN(0, 2, 0, 0);
6336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6337 IEM_MC_LOCAL(uint32_t, u32Flags);
6338 IEM_MC_LOCAL(uint32_t, EFlags);
6339 IEM_MC_FETCH_EFLAGS(EFlags);
6340 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6341 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6342 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6343 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6344 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6345 IEM_MC_COMMIT_EFLAGS(EFlags);
6346 IEM_MC_ADVANCE_RIP_AND_FINISH();
6347 IEM_MC_END();
6348}
6349
6350
6351/**
6352 * @opcode 0x9f
6353 */
6354FNIEMOP_DEF(iemOp_lahf)
6355{
6356 IEMOP_MNEMONIC(lahf, "lahf");
6357 if ( IEM_IS_64BIT_CODE(pVCpu)
6358 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6359 IEMOP_RAISE_INVALID_OPCODE_RET();
6360 IEM_MC_BEGIN(0, 1, 0, 0);
6361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6362 IEM_MC_LOCAL(uint8_t, u8Flags);
6363 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6364 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6365 IEM_MC_ADVANCE_RIP_AND_FINISH();
6366 IEM_MC_END();
6367}
6368
6369
6370/**
6371 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6372 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6373 * Will return/throw on failures.
6374 * @param a_GCPtrMemOff The variable to store the offset in.
6375 */
6376#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6377 do \
6378 { \
6379 switch (pVCpu->iem.s.enmEffAddrMode) \
6380 { \
6381 case IEMMODE_16BIT: \
6382 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6383 break; \
6384 case IEMMODE_32BIT: \
6385 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6386 break; \
6387 case IEMMODE_64BIT: \
6388 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6389 break; \
6390 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6391 } \
6392 } while (0)
6393
6394/**
6395 * @opcode 0xa0
6396 */
6397FNIEMOP_DEF(iemOp_mov_AL_Ob)
6398{
6399 /*
6400 * Get the offset.
6401 */
6402 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6403 RTGCPTR GCPtrMemOff;
6404 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6405
6406 /*
6407 * Fetch AL.
6408 */
6409 IEM_MC_BEGIN(0, 1, 0, 0);
6410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6411 IEM_MC_LOCAL(uint8_t, u8Tmp);
6412 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6413 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6414 IEM_MC_ADVANCE_RIP_AND_FINISH();
6415 IEM_MC_END();
6416}
6417
6418
6419/**
6420 * @opcode 0xa1
6421 */
6422FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6423{
6424 /*
6425 * Get the offset.
6426 */
6427 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6428 RTGCPTR GCPtrMemOff;
6429 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6430
6431 /*
6432 * Fetch rAX.
6433 */
6434 switch (pVCpu->iem.s.enmEffOpSize)
6435 {
6436 case IEMMODE_16BIT:
6437 IEM_MC_BEGIN(0, 1, 0, 0);
6438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6439 IEM_MC_LOCAL(uint16_t, u16Tmp);
6440 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6441 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6442 IEM_MC_ADVANCE_RIP_AND_FINISH();
6443 IEM_MC_END();
6444 break;
6445
6446 case IEMMODE_32BIT:
6447 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449 IEM_MC_LOCAL(uint32_t, u32Tmp);
6450 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6451 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6452 IEM_MC_ADVANCE_RIP_AND_FINISH();
6453 IEM_MC_END();
6454 break;
6455
6456 case IEMMODE_64BIT:
6457 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6459 IEM_MC_LOCAL(uint64_t, u64Tmp);
6460 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6461 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6462 IEM_MC_ADVANCE_RIP_AND_FINISH();
6463 IEM_MC_END();
6464 break;
6465
6466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6467 }
6468}
6469
6470
6471/**
6472 * @opcode 0xa2
6473 */
6474FNIEMOP_DEF(iemOp_mov_Ob_AL)
6475{
6476 /*
6477 * Get the offset.
6478 */
6479 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6480 RTGCPTR GCPtrMemOff;
6481 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6482
6483 /*
6484 * Store AL.
6485 */
6486 IEM_MC_BEGIN(0, 1, 0, 0);
6487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6488 IEM_MC_LOCAL(uint8_t, u8Tmp);
6489 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6490 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6491 IEM_MC_ADVANCE_RIP_AND_FINISH();
6492 IEM_MC_END();
6493}
6494
6495
6496/**
6497 * @opcode 0xa3
6498 */
6499FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6500{
6501 /*
6502 * Get the offset.
6503 */
6504 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6505 RTGCPTR GCPtrMemOff;
6506 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6507
6508 /*
6509 * Store rAX.
6510 */
6511 switch (pVCpu->iem.s.enmEffOpSize)
6512 {
6513 case IEMMODE_16BIT:
6514 IEM_MC_BEGIN(0, 1, 0, 0);
6515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6516 IEM_MC_LOCAL(uint16_t, u16Tmp);
6517 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6518 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6519 IEM_MC_ADVANCE_RIP_AND_FINISH();
6520 IEM_MC_END();
6521 break;
6522
6523 case IEMMODE_32BIT:
6524 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6526 IEM_MC_LOCAL(uint32_t, u32Tmp);
6527 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6528 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6529 IEM_MC_ADVANCE_RIP_AND_FINISH();
6530 IEM_MC_END();
6531 break;
6532
6533 case IEMMODE_64BIT:
6534 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6536 IEM_MC_LOCAL(uint64_t, u64Tmp);
6537 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6538 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6539 IEM_MC_ADVANCE_RIP_AND_FINISH();
6540 IEM_MC_END();
6541 break;
6542
6543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6544 }
6545}
6546
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep MOVS step: load from iEffSeg:xSI, store to ES:xDI, then
 * advance (or back up, when EFLAGS.DF is set) both index registers by the
 * operand size in bytes.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6566
6567/**
6568 * @opcode 0xa4
6569 */
6570FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6571{
6572 /*
6573 * Use the C implementation if a repeat prefix is encountered.
6574 */
6575 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6576 {
6577 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6579 switch (pVCpu->iem.s.enmEffAddrMode)
6580 {
6581 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6582 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6583 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6585 }
6586 }
6587
6588 /*
6589 * Sharing case implementation with movs[wdq] below.
6590 */
6591 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6592 switch (pVCpu->iem.s.enmEffAddrMode)
6593 {
6594 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6595 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6596 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6598 }
6599}
6600
6601
6602/**
6603 * @opcode 0xa5
6604 */
6605FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6606{
6607
6608 /*
6609 * Use the C implementation if a repeat prefix is encountered.
6610 */
6611 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6612 {
6613 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6615 switch (pVCpu->iem.s.enmEffOpSize)
6616 {
6617 case IEMMODE_16BIT:
6618 switch (pVCpu->iem.s.enmEffAddrMode)
6619 {
6620 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6621 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6622 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6624 }
6625 break;
6626 case IEMMODE_32BIT:
6627 switch (pVCpu->iem.s.enmEffAddrMode)
6628 {
6629 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6630 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6631 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6633 }
6634 case IEMMODE_64BIT:
6635 switch (pVCpu->iem.s.enmEffAddrMode)
6636 {
6637 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6638 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6639 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6641 }
6642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6643 }
6644 }
6645
6646 /*
6647 * Annoying double switch here.
6648 * Using ugly macro for implementing the cases, sharing it with movsb.
6649 */
6650 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6651 switch (pVCpu->iem.s.enmEffOpSize)
6652 {
6653 case IEMMODE_16BIT:
6654 switch (pVCpu->iem.s.enmEffAddrMode)
6655 {
6656 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6657 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6658 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6660 }
6661 break;
6662
6663 case IEMMODE_32BIT:
6664 switch (pVCpu->iem.s.enmEffAddrMode)
6665 {
6666 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6667 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6668 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6669 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6670 }
6671 break;
6672
6673 case IEMMODE_64BIT:
6674 switch (pVCpu->iem.s.enmEffAddrMode)
6675 {
6676 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6677 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6678 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6680 }
6681 break;
6682 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6683 }
6684}
6685
6686#undef IEM_MOVS_CASE
6687
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep CMPS step: compares iEffSeg:xSI against ES:xDI via
 * iemAImpl_cmp_uNN (updating EFLAGS), then advances (or backs up, when
 * EFLAGS.DF is set) both index registers by the operand size in bytes.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6715
6716/**
6717 * @opcode 0xa6
6718 */
6719FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6720{
6721
6722 /*
6723 * Use the C implementation if a repeat prefix is encountered.
6724 */
6725 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6726 {
6727 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6729 switch (pVCpu->iem.s.enmEffAddrMode)
6730 {
6731 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6732 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6733 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6735 }
6736 }
6737 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6738 {
6739 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6741 switch (pVCpu->iem.s.enmEffAddrMode)
6742 {
6743 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6744 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6745 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6747 }
6748 }
6749
6750 /*
6751 * Sharing case implementation with cmps[wdq] below.
6752 */
6753 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6754 switch (pVCpu->iem.s.enmEffAddrMode)
6755 {
6756 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6757 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6758 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
6759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6760 }
6761}
6762
6763
6764/**
6765 * @opcode 0xa7
6766 */
6767FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6768{
6769 /*
6770 * Use the C implementation if a repeat prefix is encountered.
6771 */
6772 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6773 {
6774 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6776 switch (pVCpu->iem.s.enmEffOpSize)
6777 {
6778 case IEMMODE_16BIT:
6779 switch (pVCpu->iem.s.enmEffAddrMode)
6780 {
6781 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6782 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6783 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6785 }
6786 break;
6787 case IEMMODE_32BIT:
6788 switch (pVCpu->iem.s.enmEffAddrMode)
6789 {
6790 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6791 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6792 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6793 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6794 }
6795 case IEMMODE_64BIT:
6796 switch (pVCpu->iem.s.enmEffAddrMode)
6797 {
6798 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6799 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6800 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6802 }
6803 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6804 }
6805 }
6806
6807 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6808 {
6809 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6811 switch (pVCpu->iem.s.enmEffOpSize)
6812 {
6813 case IEMMODE_16BIT:
6814 switch (pVCpu->iem.s.enmEffAddrMode)
6815 {
6816 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6817 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6818 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6819 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6820 }
6821 break;
6822 case IEMMODE_32BIT:
6823 switch (pVCpu->iem.s.enmEffAddrMode)
6824 {
6825 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6826 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6827 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6828 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6829 }
6830 case IEMMODE_64BIT:
6831 switch (pVCpu->iem.s.enmEffAddrMode)
6832 {
6833 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6834 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6835 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6836 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6837 }
6838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6839 }
6840 }
6841
6842 /*
6843 * Annoying double switch here.
6844 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6845 */
6846 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6847 switch (pVCpu->iem.s.enmEffOpSize)
6848 {
6849 case IEMMODE_16BIT:
6850 switch (pVCpu->iem.s.enmEffAddrMode)
6851 {
6852 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6853 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6854 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
6855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6856 }
6857 break;
6858
6859 case IEMMODE_32BIT:
6860 switch (pVCpu->iem.s.enmEffAddrMode)
6861 {
6862 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6863 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6864 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
6865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6866 }
6867 break;
6868
6869 case IEMMODE_64BIT:
6870 switch (pVCpu->iem.s.enmEffAddrMode)
6871 {
6872 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6873 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
6874 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
6875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6876 }
6877 break;
6878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6879 }
6880}
6881
6882#undef IEM_CMPS_CASE
6883
6884/**
6885 * @opcode 0xa8
6886 */
6887FNIEMOP_DEF(iemOp_test_AL_Ib)
6888{
6889 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6890 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6891 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6892}
6893
6894
6895/**
6896 * @opcode 0xa9
6897 */
6898FNIEMOP_DEF(iemOp_test_eAX_Iz)
6899{
6900 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6901 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6902 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6903}
6904
6905
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-rep STOS step: stores rAX (at the operand size) to ES:xDI,
 * then advances (or backs up, when EFLAGS.DF is set) xDI by the operand
 * size in bytes.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6922
6923/**
6924 * @opcode 0xaa
6925 */
6926FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6927{
6928 /*
6929 * Use the C implementation if a repeat prefix is encountered.
6930 */
6931 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6932 {
6933 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6935 switch (pVCpu->iem.s.enmEffAddrMode)
6936 {
6937 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6938 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6939 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6941 }
6942 }
6943
6944 /*
6945 * Sharing case implementation with stos[wdq] below.
6946 */
6947 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6948 switch (pVCpu->iem.s.enmEffAddrMode)
6949 {
6950 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6951 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6952 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
6953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6954 }
6955}
6956
6957
6958/**
6959 * @opcode 0xab
6960 */
6961FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6962{
6963 /*
6964 * Use the C implementation if a repeat prefix is encountered.
6965 */
6966 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6967 {
6968 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6970 switch (pVCpu->iem.s.enmEffOpSize)
6971 {
6972 case IEMMODE_16BIT:
6973 switch (pVCpu->iem.s.enmEffAddrMode)
6974 {
6975 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6976 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6977 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6978 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6979 }
6980 break;
6981 case IEMMODE_32BIT:
6982 switch (pVCpu->iem.s.enmEffAddrMode)
6983 {
6984 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6985 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6986 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6988 }
6989 case IEMMODE_64BIT:
6990 switch (pVCpu->iem.s.enmEffAddrMode)
6991 {
6992 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6993 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6994 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6996 }
6997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6998 }
6999 }
7000
7001 /*
7002 * Annoying double switch here.
7003 * Using ugly macro for implementing the cases, sharing it with stosb.
7004 */
7005 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7006 switch (pVCpu->iem.s.enmEffOpSize)
7007 {
7008 case IEMMODE_16BIT:
7009 switch (pVCpu->iem.s.enmEffAddrMode)
7010 {
7011 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7012 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7013 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7014 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7015 }
7016 break;
7017
7018 case IEMMODE_32BIT:
7019 switch (pVCpu->iem.s.enmEffAddrMode)
7020 {
7021 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7022 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7023 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7025 }
7026 break;
7027
7028 case IEMMODE_64BIT:
7029 switch (pVCpu->iem.s.enmEffAddrMode)
7030 {
7031 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7032 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7033 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 break;
7037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7038 }
7039}
7040
7041#undef IEM_STOS_CASE
7042
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-REP LODS body for one (operand width, address width)
 * combination: loads a ValBits-wide value from iEffSeg:xSI into xAX, then
 * decrements or increments xSI by ValBits/8 according to EFLAGS.DF.
 * a_fMcFlags is forwarded to IEM_MC_BEGIN (CPU/mode restriction hints). */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7059
7060/**
7061 * @opcode 0xac
7062 */
7063FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7064{
7065 /*
7066 * Use the C implementation if a repeat prefix is encountered.
7067 */
7068 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7069 {
7070 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 switch (pVCpu->iem.s.enmEffAddrMode)
7073 {
7074 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7075 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7076 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7078 }
7079 }
7080
7081 /*
7082 * Sharing case implementation with stos[wdq] below.
7083 */
7084 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7085 switch (pVCpu->iem.s.enmEffAddrMode)
7086 {
7087 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7088 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7089 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7090 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7091 }
7092}
7093
7094
7095/**
7096 * @opcode 0xad
7097 */
7098FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7099{
7100 /*
7101 * Use the C implementation if a repeat prefix is encountered.
7102 */
7103 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7104 {
7105 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107 switch (pVCpu->iem.s.enmEffOpSize)
7108 {
7109 case IEMMODE_16BIT:
7110 switch (pVCpu->iem.s.enmEffAddrMode)
7111 {
7112 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7113 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7114 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7116 }
7117 break;
7118 case IEMMODE_32BIT:
7119 switch (pVCpu->iem.s.enmEffAddrMode)
7120 {
7121 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7122 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7123 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7125 }
7126 case IEMMODE_64BIT:
7127 switch (pVCpu->iem.s.enmEffAddrMode)
7128 {
7129 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7130 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7131 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7133 }
7134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7135 }
7136 }
7137
7138 /*
7139 * Annoying double switch here.
7140 * Using ugly macro for implementing the cases, sharing it with lodsb.
7141 */
7142 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7143 switch (pVCpu->iem.s.enmEffOpSize)
7144 {
7145 case IEMMODE_16BIT:
7146 switch (pVCpu->iem.s.enmEffAddrMode)
7147 {
7148 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7149 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7150 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7152 }
7153 break;
7154
7155 case IEMMODE_32BIT:
7156 switch (pVCpu->iem.s.enmEffAddrMode)
7157 {
7158 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7159 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7160 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 break;
7164
7165 case IEMMODE_64BIT:
7166 switch (pVCpu->iem.s.enmEffAddrMode)
7167 {
7168 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7169 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7170 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7172 }
7173 break;
7174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7175 }
7176}
7177
7178#undef IEM_LODS_CASE
7179
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-REP SCAS body for one (operand width, address width)
 * combination: fetches the ValBits-wide value at ES:xDI, runs it through
 * iemAImpl_cmp_uNN against xAX to set EFLAGS (presumably xAX itself is
 * not modified by a compare -- it is passed by reference only because the
 * AIMPL signature requires it), then steps xDI by ValBits/8 up or down
 * per EFLAGS.DF.  a_fMcFlags is forwarded to IEM_MC_BEGIN. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7202
7203/**
7204 * @opcode 0xae
7205 */
7206FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7207{
7208 /*
7209 * Use the C implementation if a repeat prefix is encountered.
7210 */
7211 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7212 {
7213 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7215 switch (pVCpu->iem.s.enmEffAddrMode)
7216 {
7217 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7218 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7219 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7220 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7221 }
7222 }
7223 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7224 {
7225 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7227 switch (pVCpu->iem.s.enmEffAddrMode)
7228 {
7229 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7230 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7231 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7233 }
7234 }
7235
7236 /*
7237 * Sharing case implementation with stos[wdq] below.
7238 */
7239 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7240 switch (pVCpu->iem.s.enmEffAddrMode)
7241 {
7242 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7243 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7244 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7246 }
7247}
7248
7249
7250/**
7251 * @opcode 0xaf
7252 */
7253FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7254{
7255 /*
7256 * Use the C implementation if a repeat prefix is encountered.
7257 */
7258 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7259 {
7260 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7262 switch (pVCpu->iem.s.enmEffOpSize)
7263 {
7264 case IEMMODE_16BIT:
7265 switch (pVCpu->iem.s.enmEffAddrMode)
7266 {
7267 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7268 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7269 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7271 }
7272 break;
7273 case IEMMODE_32BIT:
7274 switch (pVCpu->iem.s.enmEffAddrMode)
7275 {
7276 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7277 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7278 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7280 }
7281 case IEMMODE_64BIT:
7282 switch (pVCpu->iem.s.enmEffAddrMode)
7283 {
7284 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7285 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7286 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7287 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7288 }
7289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7290 }
7291 }
7292 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7293 {
7294 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7296 switch (pVCpu->iem.s.enmEffOpSize)
7297 {
7298 case IEMMODE_16BIT:
7299 switch (pVCpu->iem.s.enmEffAddrMode)
7300 {
7301 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7302 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7303 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7305 }
7306 break;
7307 case IEMMODE_32BIT:
7308 switch (pVCpu->iem.s.enmEffAddrMode)
7309 {
7310 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7311 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7312 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7314 }
7315 case IEMMODE_64BIT:
7316 switch (pVCpu->iem.s.enmEffAddrMode)
7317 {
7318 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7319 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7320 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7322 }
7323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7324 }
7325 }
7326
7327 /*
7328 * Annoying double switch here.
7329 * Using ugly macro for implementing the cases, sharing it with scasb.
7330 */
7331 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7332 switch (pVCpu->iem.s.enmEffOpSize)
7333 {
7334 case IEMMODE_16BIT:
7335 switch (pVCpu->iem.s.enmEffAddrMode)
7336 {
7337 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7338 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7339 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7341 }
7342 break;
7343
7344 case IEMMODE_32BIT:
7345 switch (pVCpu->iem.s.enmEffAddrMode)
7346 {
7347 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7348 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7349 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 break;
7353
7354 case IEMMODE_64BIT:
7355 switch (pVCpu->iem.s.enmEffAddrMode)
7356 {
7357 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7358 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7359 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7361 }
7362 break;
7363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7364 }
7365}
7366
7367#undef IEM_SCAS_CASE
7368
7369/**
7370 * Common 'mov r8, imm8' helper.
7371 */
7372FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7373{
7374 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7375 IEM_MC_BEGIN(0, 1, 0, 0);
7376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7377 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
7378 IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
7379 IEM_MC_ADVANCE_RIP_AND_FINISH();
7380 IEM_MC_END();
7381}
7382
7383
7384/**
7385 * @opcode 0xb0
7386 */
7387FNIEMOP_DEF(iemOp_mov_AL_Ib)
7388{
7389 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7390 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7391}
7392
7393
7394/**
7395 * @opcode 0xb1
7396 */
7397FNIEMOP_DEF(iemOp_CL_Ib)
7398{
7399 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7400 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7401}
7402
7403
7404/**
7405 * @opcode 0xb2
7406 */
7407FNIEMOP_DEF(iemOp_DL_Ib)
7408{
7409 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7410 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7411}
7412
7413
7414/**
7415 * @opcode 0xb3
7416 */
7417FNIEMOP_DEF(iemOp_BL_Ib)
7418{
7419 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7420 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7421}
7422
7423
7424/**
7425 * @opcode 0xb4
7426 */
7427FNIEMOP_DEF(iemOp_mov_AH_Ib)
7428{
7429 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7430 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7431}
7432
7433
7434/**
7435 * @opcode 0xb5
7436 */
7437FNIEMOP_DEF(iemOp_CH_Ib)
7438{
7439 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7440 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7441}
7442
7443
7444/**
7445 * @opcode 0xb6
7446 */
7447FNIEMOP_DEF(iemOp_DH_Ib)
7448{
7449 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7450 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7451}
7452
7453
7454/**
7455 * @opcode 0xb7
7456 */
7457FNIEMOP_DEF(iemOp_BH_Ib)
7458{
7459 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7460 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7461}
7462
7463
7464/**
7465 * Common 'mov regX,immX' helper.
7466 */
7467FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7468{
7469 switch (pVCpu->iem.s.enmEffOpSize)
7470 {
7471 case IEMMODE_16BIT:
7472 IEM_MC_BEGIN(0, 1, 0, 0);
7473 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7475 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
7476 IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
7477 IEM_MC_ADVANCE_RIP_AND_FINISH();
7478 IEM_MC_END();
7479 break;
7480
7481 case IEMMODE_32BIT:
7482 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
7483 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7485 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
7486 IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
7487 IEM_MC_ADVANCE_RIP_AND_FINISH();
7488 IEM_MC_END();
7489 break;
7490
7491 case IEMMODE_64BIT:
7492 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7493 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7495 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
7496 IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
7497 IEM_MC_ADVANCE_RIP_AND_FINISH();
7498 IEM_MC_END();
7499 break;
7500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7501 }
7502}
7503
7504
7505/**
7506 * @opcode 0xb8
7507 */
7508FNIEMOP_DEF(iemOp_eAX_Iv)
7509{
7510 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7511 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7512}
7513
7514
7515/**
7516 * @opcode 0xb9
7517 */
7518FNIEMOP_DEF(iemOp_eCX_Iv)
7519{
7520 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7521 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7522}
7523
7524
7525/**
7526 * @opcode 0xba
7527 */
7528FNIEMOP_DEF(iemOp_eDX_Iv)
7529{
7530 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7531 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7532}
7533
7534
7535/**
7536 * @opcode 0xbb
7537 */
7538FNIEMOP_DEF(iemOp_eBX_Iv)
7539{
7540 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7541 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7542}
7543
7544
7545/**
7546 * @opcode 0xbc
7547 */
7548FNIEMOP_DEF(iemOp_eSP_Iv)
7549{
7550 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7551 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7552}
7553
7554
7555/**
7556 * @opcode 0xbd
7557 */
7558FNIEMOP_DEF(iemOp_eBP_Iv)
7559{
7560 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7561 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7562}
7563
7564
7565/**
7566 * @opcode 0xbe
7567 */
7568FNIEMOP_DEF(iemOp_eSI_Iv)
7569{
7570 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7571 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7572}
7573
7574
7575/**
7576 * @opcode 0xbf
7577 */
7578FNIEMOP_DEF(iemOp_eDI_Iv)
7579{
7580 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7581 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7582}
7583
7584
7585/**
7586 * @opcode 0xc0
7587 */
7588FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
7589{
7590 IEMOP_HLP_MIN_186();
7591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7592 PCIEMOPSHIFTSIZES pImpl;
7593 switch (IEM_GET_MODRM_REG_8(bRm))
7594 {
7595 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7596 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7597 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7598 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7599 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7600 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7601 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7602 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7603 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7604 }
7605 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7606
7607 if (IEM_IS_MODRM_REG_MODE(bRm))
7608 {
7609 /* register */
7610 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7611 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7613 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7614 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7615 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7616 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7617 IEM_MC_REF_EFLAGS(pEFlags);
7618 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7619 IEM_MC_ADVANCE_RIP_AND_FINISH();
7620 IEM_MC_END();
7621 }
7622 else
7623 {
7624 /* memory */
7625 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
7626 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7627 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7628 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7630 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7631
7632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7633 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7634 IEM_MC_ASSIGN(cShiftArg, cShift);
7635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7636 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7637 IEM_MC_FETCH_EFLAGS(EFlags);
7638 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7639
7640 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
7641 IEM_MC_COMMIT_EFLAGS(EFlags);
7642 IEM_MC_ADVANCE_RIP_AND_FINISH();
7643 IEM_MC_END();
7644 }
7645}
7646
7647
7648/**
7649 * @opcode 0xc1
7650 */
7651FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7652{
7653 IEMOP_HLP_MIN_186();
7654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7655 PCIEMOPSHIFTSIZES pImpl;
7656 switch (IEM_GET_MODRM_REG_8(bRm))
7657 {
7658 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7659 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7660 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7661 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7662 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7663 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7664 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7665 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7666 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7667 }
7668 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7669
7670 if (IEM_IS_MODRM_REG_MODE(bRm))
7671 {
7672 /* register */
7673 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7674 switch (pVCpu->iem.s.enmEffOpSize)
7675 {
7676 case IEMMODE_16BIT:
7677 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7679 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7680 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7681 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7682 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7683 IEM_MC_REF_EFLAGS(pEFlags);
7684 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7685 IEM_MC_ADVANCE_RIP_AND_FINISH();
7686 IEM_MC_END();
7687 break;
7688
7689 case IEMMODE_32BIT:
7690 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7693 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7694 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7695 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7696 IEM_MC_REF_EFLAGS(pEFlags);
7697 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7698 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7699 IEM_MC_ADVANCE_RIP_AND_FINISH();
7700 IEM_MC_END();
7701 break;
7702
7703 case IEMMODE_64BIT:
7704 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7706 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7707 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7708 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7709 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7710 IEM_MC_REF_EFLAGS(pEFlags);
7711 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7712 IEM_MC_ADVANCE_RIP_AND_FINISH();
7713 IEM_MC_END();
7714 break;
7715
7716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7717 }
7718 }
7719 else
7720 {
7721 /* memory */
7722 switch (pVCpu->iem.s.enmEffOpSize)
7723 {
7724 case IEMMODE_16BIT:
7725 IEM_MC_BEGIN(3, 3, 0, 0);
7726 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7727 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7728 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7730 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7731
7732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7733 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7734 IEM_MC_ASSIGN(cShiftArg, cShift);
7735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7736 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7737 IEM_MC_FETCH_EFLAGS(EFlags);
7738 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7739
7740 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
7741 IEM_MC_COMMIT_EFLAGS(EFlags);
7742 IEM_MC_ADVANCE_RIP_AND_FINISH();
7743 IEM_MC_END();
7744 break;
7745
7746 case IEMMODE_32BIT:
7747 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
7748 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7749 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7750 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7752 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7753
7754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7755 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7756 IEM_MC_ASSIGN(cShiftArg, cShift);
7757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7758 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7759 IEM_MC_FETCH_EFLAGS(EFlags);
7760 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7761
7762 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
7763 IEM_MC_COMMIT_EFLAGS(EFlags);
7764 IEM_MC_ADVANCE_RIP_AND_FINISH();
7765 IEM_MC_END();
7766 break;
7767
7768 case IEMMODE_64BIT:
7769 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
7770 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7771 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7772 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7774 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7775
7776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7777 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7778 IEM_MC_ASSIGN(cShiftArg, cShift);
7779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7780 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7781 IEM_MC_FETCH_EFLAGS(EFlags);
7782 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7783
7784 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
7785 IEM_MC_COMMIT_EFLAGS(EFlags);
7786 IEM_MC_ADVANCE_RIP_AND_FINISH();
7787 IEM_MC_END();
7788 break;
7789
7790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7791 }
7792 }
7793}
7794
7795
7796/**
7797 * @opcode 0xc2
7798 */
7799FNIEMOP_DEF(iemOp_retn_Iw)
7800{
7801 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7802 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7803 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7805 switch (pVCpu->iem.s.enmEffOpSize)
7806 {
7807 case IEMMODE_16BIT:
7808 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
7809 case IEMMODE_32BIT:
7810 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
7811 case IEMMODE_64BIT:
7812 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
7813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7814 }
7815}
7816
7817
7818/**
7819 * @opcode 0xc3
7820 */
7821FNIEMOP_DEF(iemOp_retn)
7822{
7823 IEMOP_MNEMONIC(retn, "retn");
7824 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7826 switch (pVCpu->iem.s.enmEffOpSize)
7827 {
7828 case IEMMODE_16BIT:
7829 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
7830 case IEMMODE_32BIT:
7831 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
7832 case IEMMODE_64BIT:
7833 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
7834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7835 }
7836}
7837
7838
7839/**
7840 * @opcode 0xc4
7841 */
7842FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7843{
7844 /* The LDS instruction is invalid 64-bit mode. In legacy and
7845 compatability mode it is invalid with MOD=3.
7846 The use as a VEX prefix is made possible by assigning the inverted
7847 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7848 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7850 if ( IEM_IS_64BIT_CODE(pVCpu)
7851 || IEM_IS_MODRM_REG_MODE(bRm) )
7852 {
7853 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7854 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7855 {
7856 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7857 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7858 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7859 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7860 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7861 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
7862 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
7863 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7864 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
7865 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
7866 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7867 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7868 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
7869
7870 switch (bRm & 0x1f)
7871 {
7872 case 1: /* 0x0f lead opcode byte. */
7873#ifdef IEM_WITH_VEX
7874 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7875#else
7876 IEMOP_BITCH_ABOUT_STUB();
7877 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7878#endif
7879
7880 case 2: /* 0x0f 0x38 lead opcode bytes. */
7881#ifdef IEM_WITH_VEX
7882 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7883#else
7884 IEMOP_BITCH_ABOUT_STUB();
7885 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7886#endif
7887
7888 case 3: /* 0x0f 0x3a lead opcode bytes. */
7889#ifdef IEM_WITH_VEX
7890 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7891#else
7892 IEMOP_BITCH_ABOUT_STUB();
7893 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7894#endif
7895
7896 default:
7897 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7898 IEMOP_RAISE_INVALID_OPCODE_RET();
7899 }
7900 }
7901 Log(("VEX3: VEX support disabled!\n"));
7902 IEMOP_RAISE_INVALID_OPCODE_RET();
7903 }
7904
7905 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7906 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7907}
7908
7909
7910/**
7911 * @opcode 0xc5
7912 */
7913FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7914{
7915 /* The LES instruction is invalid 64-bit mode. In legacy and
7916 compatability mode it is invalid with MOD=3.
7917 The use as a VEX prefix is made possible by assigning the inverted
7918 REX.R to the top MOD bit, and the top bit in the inverted register
7919 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7920 to accessing registers 0..7 in this VEX form. */
7921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7922 if ( IEM_IS_64BIT_CODE(pVCpu)
7923 || IEM_IS_MODRM_REG_MODE(bRm))
7924 {
7925 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7926 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7927 {
7928 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7929 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7930 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7931 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7932 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7933 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7934 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7935 pVCpu->iem.s.idxPrefix = bRm & 0x3;
7936
7937#ifdef IEM_WITH_VEX
7938 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7939#else
7940 IEMOP_BITCH_ABOUT_STUB();
7941 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7942#endif
7943 }
7944
7945 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7946 Log(("VEX2: VEX support disabled!\n"));
7947 IEMOP_RAISE_INVALID_OPCODE_RET();
7948 }
7949
7950 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7951 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7952}
7953
7954
7955/**
7956 * @opcode 0xc6
7957 */
7958FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7959{
7960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7961 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7962 IEMOP_RAISE_INVALID_OPCODE_RET();
7963 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
7964
7965 if (IEM_IS_MODRM_REG_MODE(bRm))
7966 {
7967 /* register access */
7968 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7969 IEM_MC_BEGIN(0, 0, 0, 0);
7970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7971 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
7972 IEM_MC_ADVANCE_RIP_AND_FINISH();
7973 IEM_MC_END();
7974 }
7975 else
7976 {
7977 /* memory access. */
7978 IEM_MC_BEGIN(0, 1, 0, 0);
7979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7981 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7983 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
7984 IEM_MC_ADVANCE_RIP_AND_FINISH();
7985 IEM_MC_END();
7986 }
7987}
7988
7989
7990/**
7991 * @opcode 0xc7
7992 */
7993FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
7994{
7995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7996 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7997 IEMOP_RAISE_INVALID_OPCODE_RET();
7998 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
7999
8000 if (IEM_IS_MODRM_REG_MODE(bRm))
8001 {
8002 /* register access */
8003 switch (pVCpu->iem.s.enmEffOpSize)
8004 {
8005 case IEMMODE_16BIT:
8006 IEM_MC_BEGIN(0, 0, 0, 0);
8007 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8009 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8010 IEM_MC_ADVANCE_RIP_AND_FINISH();
8011 IEM_MC_END();
8012 break;
8013
8014 case IEMMODE_32BIT:
8015 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8016 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8018 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8019 IEM_MC_ADVANCE_RIP_AND_FINISH();
8020 IEM_MC_END();
8021 break;
8022
8023 case IEMMODE_64BIT:
8024 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8025 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8027 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8028 IEM_MC_ADVANCE_RIP_AND_FINISH();
8029 IEM_MC_END();
8030 break;
8031
8032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8033 }
8034 }
8035 else
8036 {
8037 /* memory access. */
8038 switch (pVCpu->iem.s.enmEffOpSize)
8039 {
8040 case IEMMODE_16BIT:
8041 IEM_MC_BEGIN(0, 1, 0, 0);
8042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8044 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8046 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8047 IEM_MC_ADVANCE_RIP_AND_FINISH();
8048 IEM_MC_END();
8049 break;
8050
8051 case IEMMODE_32BIT:
8052 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8055 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8057 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8058 IEM_MC_ADVANCE_RIP_AND_FINISH();
8059 IEM_MC_END();
8060 break;
8061
8062 case IEMMODE_64BIT:
8063 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8066 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8068 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8069 IEM_MC_ADVANCE_RIP_AND_FINISH();
8070 IEM_MC_END();
8071 break;
8072
8073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8074 }
8075 }
8076}
8077
8078
8079
8080
8081/**
8082 * @opcode 0xc8
8083 */
8084FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8085{
8086 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8087 IEMOP_HLP_MIN_186();
8088 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8089 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8090 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8092 IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8093}
8094
8095
8096/**
8097 * @opcode 0xc9
8098 */
8099FNIEMOP_DEF(iemOp_leave)
8100{
8101 IEMOP_MNEMONIC(leave, "leave");
8102 IEMOP_HLP_MIN_186();
8103 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8105 IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8106}
8107
8108
8109/**
8110 * @opcode 0xca
8111 */
8112FNIEMOP_DEF(iemOp_retf_Iw)
8113{
8114 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8115 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8117 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8118 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8119}
8120
8121
8122/**
8123 * @opcode 0xcb
8124 */
8125FNIEMOP_DEF(iemOp_retf)
8126{
8127 IEMOP_MNEMONIC(retf, "retf");
8128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8129 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
8130 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8131}
8132
8133
8134/**
8135 * @opcode 0xcc
8136 */
8137FNIEMOP_DEF(iemOp_int3)
8138{
8139 IEMOP_MNEMONIC(int3, "int3");
8140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8141 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8142 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8143 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8144}
8145
8146
8147/**
8148 * @opcode 0xcd
8149 */
8150FNIEMOP_DEF(iemOp_int_Ib)
8151{
8152 IEMOP_MNEMONIC(int_Ib, "int Ib");
8153 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8155 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8156 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8157 iemCImpl_int, u8Int, IEMINT_INTN);
8158}
8159
8160
8161/**
8162 * @opcode 0xce
8163 */
8164FNIEMOP_DEF(iemOp_into)
8165{
8166 IEMOP_MNEMONIC(into, "into");
8167 IEMOP_HLP_NO_64BIT();
8168 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
8169 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8170 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8171}
8172
8173
8174/**
8175 * @opcode 0xcf
8176 */
8177FNIEMOP_DEF(iemOp_iret)
8178{
8179 IEMOP_MNEMONIC(iret, "iret");
8180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8181 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
8182 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8183 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8184}
8185
8186
8187/**
8188 * @opcode 0xd0
8189 */
8190FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8191{
8192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8193 PCIEMOPSHIFTSIZES pImpl;
8194 switch (IEM_GET_MODRM_REG_8(bRm))
8195 {
8196 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8197 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8198 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8199 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8200 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8201 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8202 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8203 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8204 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8205 }
8206 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8207
8208 if (IEM_IS_MODRM_REG_MODE(bRm))
8209 {
8210 /* register */
8211 IEM_MC_BEGIN(3, 0, 0, 0);
8212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8213 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8214 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8216 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8217 IEM_MC_REF_EFLAGS(pEFlags);
8218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8219 IEM_MC_ADVANCE_RIP_AND_FINISH();
8220 IEM_MC_END();
8221 }
8222 else
8223 {
8224 /* memory */
8225 IEM_MC_BEGIN(3, 3, 0, 0);
8226 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8227 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8228 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8230 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8231
8232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8234 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8235 IEM_MC_FETCH_EFLAGS(EFlags);
8236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8237
8238 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8239 IEM_MC_COMMIT_EFLAGS(EFlags);
8240 IEM_MC_ADVANCE_RIP_AND_FINISH();
8241 IEM_MC_END();
8242 }
8243}
8244
8245
8246
8247/**
8248 * @opcode 0xd1
8249 */
8250FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8251{
8252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8253 PCIEMOPSHIFTSIZES pImpl;
8254 switch (IEM_GET_MODRM_REG_8(bRm))
8255 {
8256 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8257 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8258 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8259 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8260 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8261 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8262 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8263 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8264 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8265 }
8266 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8267
8268 if (IEM_IS_MODRM_REG_MODE(bRm))
8269 {
8270 /* register */
8271 switch (pVCpu->iem.s.enmEffOpSize)
8272 {
8273 case IEMMODE_16BIT:
8274 IEM_MC_BEGIN(3, 0, 0, 0);
8275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8276 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8277 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8278 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8279 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8280 IEM_MC_REF_EFLAGS(pEFlags);
8281 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8282 IEM_MC_ADVANCE_RIP_AND_FINISH();
8283 IEM_MC_END();
8284 break;
8285
8286 case IEMMODE_32BIT:
8287 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8289 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8290 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8291 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8292 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8293 IEM_MC_REF_EFLAGS(pEFlags);
8294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8295 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8296 IEM_MC_ADVANCE_RIP_AND_FINISH();
8297 IEM_MC_END();
8298 break;
8299
8300 case IEMMODE_64BIT:
8301 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8303 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8304 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8305 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8306 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8307 IEM_MC_REF_EFLAGS(pEFlags);
8308 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8309 IEM_MC_ADVANCE_RIP_AND_FINISH();
8310 IEM_MC_END();
8311 break;
8312
8313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8314 }
8315 }
8316 else
8317 {
8318 /* memory */
8319 switch (pVCpu->iem.s.enmEffOpSize)
8320 {
8321 case IEMMODE_16BIT:
8322 IEM_MC_BEGIN(3, 3, 0, 0);
8323 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8324 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8325 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8327 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8328
8329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8331 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8332 IEM_MC_FETCH_EFLAGS(EFlags);
8333 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8334
8335 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8336 IEM_MC_COMMIT_EFLAGS(EFlags);
8337 IEM_MC_ADVANCE_RIP_AND_FINISH();
8338 IEM_MC_END();
8339 break;
8340
8341 case IEMMODE_32BIT:
8342 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8343 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8344 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8345 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8347 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8348
8349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8351 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8352 IEM_MC_FETCH_EFLAGS(EFlags);
8353 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8354
8355 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8356 IEM_MC_COMMIT_EFLAGS(EFlags);
8357 IEM_MC_ADVANCE_RIP_AND_FINISH();
8358 IEM_MC_END();
8359 break;
8360
8361 case IEMMODE_64BIT:
8362 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8363 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8364 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8365 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8367 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8368
8369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8371 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8372 IEM_MC_FETCH_EFLAGS(EFlags);
8373 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8374
8375 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8376 IEM_MC_COMMIT_EFLAGS(EFlags);
8377 IEM_MC_ADVANCE_RIP_AND_FINISH();
8378 IEM_MC_END();
8379 break;
8380
8381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8382 }
8383 }
8384}
8385
8386
8387/**
8388 * @opcode 0xd2
8389 */
8390FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8391{
8392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8393 PCIEMOPSHIFTSIZES pImpl;
8394 switch (IEM_GET_MODRM_REG_8(bRm))
8395 {
8396 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8397 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8398 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8399 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8400 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8401 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8402 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8403 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8404 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8405 }
8406 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8407
8408 if (IEM_IS_MODRM_REG_MODE(bRm))
8409 {
8410 /* register */
8411 IEM_MC_BEGIN(3, 0, 0, 0);
8412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8413 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8414 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8416 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8417 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8418 IEM_MC_REF_EFLAGS(pEFlags);
8419 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8420 IEM_MC_ADVANCE_RIP_AND_FINISH();
8421 IEM_MC_END();
8422 }
8423 else
8424 {
8425 /* memory */
8426 IEM_MC_BEGIN(3, 3, 0, 0);
8427 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8428 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8429 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8431 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8432
8433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8435 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8436 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8437 IEM_MC_FETCH_EFLAGS(EFlags);
8438 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8439
8440 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8441 IEM_MC_COMMIT_EFLAGS(EFlags);
8442 IEM_MC_ADVANCE_RIP_AND_FINISH();
8443 IEM_MC_END();
8444 }
8445}
8446
8447
8448/**
8449 * @opcode 0xd3
8450 */
8451FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8452{
8453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8454 PCIEMOPSHIFTSIZES pImpl;
8455 switch (IEM_GET_MODRM_REG_8(bRm))
8456 {
8457 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
8458 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
8459 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
8460 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
8461 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
8462 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
8463 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
8464 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8465 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8466 }
8467 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8468
8469 if (IEM_IS_MODRM_REG_MODE(bRm))
8470 {
8471 /* register */
8472 switch (pVCpu->iem.s.enmEffOpSize)
8473 {
8474 case IEMMODE_16BIT:
8475 IEM_MC_BEGIN(3, 0, 0, 0);
8476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8477 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8478 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8479 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8480 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8481 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8482 IEM_MC_REF_EFLAGS(pEFlags);
8483 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8484 IEM_MC_ADVANCE_RIP_AND_FINISH();
8485 IEM_MC_END();
8486 break;
8487
8488 case IEMMODE_32BIT:
8489 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8491 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8492 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8493 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8494 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8495 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8496 IEM_MC_REF_EFLAGS(pEFlags);
8497 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8498 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8499 IEM_MC_ADVANCE_RIP_AND_FINISH();
8500 IEM_MC_END();
8501 break;
8502
8503 case IEMMODE_64BIT:
8504 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8506 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8507 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8508 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8509 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8510 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8511 IEM_MC_REF_EFLAGS(pEFlags);
8512 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8513 IEM_MC_ADVANCE_RIP_AND_FINISH();
8514 IEM_MC_END();
8515 break;
8516
8517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8518 }
8519 }
8520 else
8521 {
8522 /* memory */
8523 switch (pVCpu->iem.s.enmEffOpSize)
8524 {
8525 case IEMMODE_16BIT:
8526 IEM_MC_BEGIN(3, 3, 0, 0);
8527 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8528 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8529 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8531 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8532
8533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8535 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8536 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8537 IEM_MC_FETCH_EFLAGS(EFlags);
8538 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8539
8540 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8541 IEM_MC_COMMIT_EFLAGS(EFlags);
8542 IEM_MC_ADVANCE_RIP_AND_FINISH();
8543 IEM_MC_END();
8544 break;
8545
8546 case IEMMODE_32BIT:
8547 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8548 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8549 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8550 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8552 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8553
8554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8556 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8557 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8558 IEM_MC_FETCH_EFLAGS(EFlags);
8559 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8560
8561 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8562 IEM_MC_COMMIT_EFLAGS(EFlags);
8563 IEM_MC_ADVANCE_RIP_AND_FINISH();
8564 IEM_MC_END();
8565 break;
8566
8567 case IEMMODE_64BIT:
8568 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8569 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8570 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8571 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8573 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8574
8575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8577 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8578 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8579 IEM_MC_FETCH_EFLAGS(EFlags);
8580 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8581
8582 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8583 IEM_MC_COMMIT_EFLAGS(EFlags);
8584 IEM_MC_ADVANCE_RIP_AND_FINISH();
8585 IEM_MC_END();
8586 break;
8587
8588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8589 }
8590 }
8591}
8592
8593/**
8594 * @opcode 0xd4
8595 */
8596FNIEMOP_DEF(iemOp_aam_Ib)
8597{
8598 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
8599 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8601 IEMOP_HLP_NO_64BIT();
8602 if (!bImm)
8603 IEMOP_RAISE_DIVIDE_ERROR_RET();
8604 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
8605}
8606
8607
8608/**
8609 * @opcode 0xd5
8610 */
8611FNIEMOP_DEF(iemOp_aad_Ib)
8612{
8613 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
8614 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
8615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8616 IEMOP_HLP_NO_64BIT();
8617 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
8618}
8619
8620
8621/**
8622 * @opcode 0xd6
8623 */
8624FNIEMOP_DEF(iemOp_salc)
8625{
8626 IEMOP_MNEMONIC(salc, "salc");
8627 IEMOP_HLP_NO_64BIT();
8628
8629 IEM_MC_BEGIN(0, 0, 0, 0);
8630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8631 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8632 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
8633 } IEM_MC_ELSE() {
8634 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
8635 } IEM_MC_ENDIF();
8636 IEM_MC_ADVANCE_RIP_AND_FINISH();
8637 IEM_MC_END();
8638}
8639
8640
8641/**
8642 * @opcode 0xd7
8643 */
8644FNIEMOP_DEF(iemOp_xlat)
8645{
8646 IEMOP_MNEMONIC(xlat, "xlat");
8647 switch (pVCpu->iem.s.enmEffAddrMode)
8648 {
8649 case IEMMODE_16BIT:
8650 IEM_MC_BEGIN(2, 0, 0, 0);
8651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8652 IEM_MC_LOCAL(uint8_t, u8Tmp);
8653 IEM_MC_LOCAL(uint16_t, u16Addr);
8654 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
8655 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
8656 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
8657 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8658 IEM_MC_ADVANCE_RIP_AND_FINISH();
8659 IEM_MC_END();
8660 break;
8661
8662 case IEMMODE_32BIT:
8663 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
8664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8665 IEM_MC_LOCAL(uint8_t, u8Tmp);
8666 IEM_MC_LOCAL(uint32_t, u32Addr);
8667 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
8668 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
8669 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
8670 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8671 IEM_MC_ADVANCE_RIP_AND_FINISH();
8672 IEM_MC_END();
8673 break;
8674
8675 case IEMMODE_64BIT:
8676 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
8677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8678 IEM_MC_LOCAL(uint8_t, u8Tmp);
8679 IEM_MC_LOCAL(uint64_t, u64Addr);
8680 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
8681 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
8682 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
8683 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8684 IEM_MC_ADVANCE_RIP_AND_FINISH();
8685 IEM_MC_END();
8686 break;
8687
8688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8689 }
8690}
8691
8692
8693/**
8694 * Common worker for FPU instructions working on ST0 and STn, and storing the
8695 * result in ST0.
8696 *
8697 * @param bRm Mod R/M byte.
8698 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8699 */
8700FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8701{
8702 IEM_MC_BEGIN(3, 1, 0, 0);
8703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8704 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8705 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8706 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8707 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8708
8709 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8710 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8711 IEM_MC_PREPARE_FPU_USAGE();
8712 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8713 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8714 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8715 } IEM_MC_ELSE() {
8716 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8717 } IEM_MC_ENDIF();
8718 IEM_MC_ADVANCE_RIP_AND_FINISH();
8719
8720 IEM_MC_END();
8721}
8722
8723
8724/**
8725 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8726 * flags.
8727 *
8728 * @param bRm Mod R/M byte.
8729 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8730 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* No register is written here; only the FSW produced by the worker is
       merged into the FPU state (compare-style instructions). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8753
8754
8755/**
8756 * Common worker for FPU instructions working on ST0 and STn, only affecting
8757 * flags, and popping when done.
8758 *
8759 * @param bRm Mod R/M byte.
8760 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8761 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as iemOpHlpFpuNoStore_st0_stN, but the stack is popped after the
       FSW update (and on the underflow path too). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8784
8785
/** Opcode 0xd8 11/0: fadd st0,stN - ST0 := ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8792
8793
/** Opcode 0xd8 11/1: fmul st0,stN - ST0 := ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8800
8801
/** Opcode 0xd8 11/2: fcom st0,stN - compare ST0 with STn, setting C0/C2/C3. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8808
8809
/** Opcode 0xd8 11/3: fcomp st0,stN - compare ST0 with STn, then pop.
 *  Shares the fcom assembly worker; only the popping differs. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8816
8817
/** Opcode 0xd8 11/4: fsub st0,stN - ST0 := ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8824
8825
/** Opcode 0xd8 11/5: fsubr st0,stN - reversed subtract, ST0 := STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8832
8833
/** Opcode 0xd8 11/6: fdiv st0,stN - ST0 := ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8840
8841
/** Opcode 0xd8 11/7: fdivr st0,stN - reversed divide, ST0 := STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8848
8849
8850/**
8851 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8852 * the result in ST0.
8853 *
8854 * @param bRm Mod R/M byte.
8855 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8856 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address is calculated before the decoding is marked done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The 32-bit real operand is fetched into a local; the worker takes it
       by reference alongside the ST0 register reference. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8885
8886
/** Opcode 0xd8 !11/0: fadd st0,m32r - ST0 := ST0 + 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8893
8894
/** Opcode 0xd8 !11/1: fmul st0,m32r - ST0 := ST0 * 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8901
8902
/** Opcode 0xd8 !11/2: fcom st0,m32r - compare ST0 with a 32-bit real from
 *  memory, setting C0/C2/C3.  Open-coded (not via iemOpHlpFpu_st0_m32r)
 *  because only the FSW is updated, no register result is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The memory operand address is recorded as FPUDP with the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8934
8935
/** Opcode 0xd8 !11/3: fcomp st0,m32r - like fcom st0,m32r but pops the
 *  stack afterwards (also on underflow). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8967
8968
/** Opcode 0xd8 !11/4: fsub st0,m32r - ST0 := ST0 - 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8975
8976
/** Opcode 0xd8 !11/5: fsubr st0,m32r - reversed subtract, ST0 := m32r - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8983
8984
/** Opcode 0xd8 !11/6: fdiv st0,m32r - ST0 := ST0 / 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8991
8992
/** Opcode 0xd8 !11/7: fdivr st0,m32r - reversed divide, ST0 := m32r / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8999
9000
9001/**
9002 * @opcode 0xd8
9003 */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Dispatcher for the 0xd8 FPU escape byte: the ModR/M reg field selects
       the operation, the mod field selects register vs. m32r operand form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xd8 + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: operate on ST0 and STn. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: operate on ST0 and a 32-bit real memory operand. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9040
9041
9042/** Opcode 0xd9 /0 mem32real
9043 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push goes into ST7 (the register below the current top); if it is
       occupied we have a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9072
9073
/** Opcode 0xd9 !11/2 mem32real - fst m32r: store ST0 as 32-bit real. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped for writing before we know whether ST0 is
       empty, so memory faults are raised regardless. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* The commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: if the invalid-operation exception is masked,
           write the QNaN indefinite value to memory. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9107
9108
/** Opcode 0xd9 !11/3 - fstp m32r: store ST0 as 32-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Same as fst m32r, but the stack is popped after the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: write QNaN indefinite if #IA is masked, then pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9142
9143
/** Opcode 0xd9 !11/4 - fldenv: load the FPU environment (14 or 28 bytes
 *  depending on the effective operand size); implemented in C. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The C implementation advances RIP itself; no ADVANCE_RIP here. */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9160
9161
9162/** Opcode 0xd9 !11/5 */
9163FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9164{
9165 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9166 IEM_MC_BEGIN(1, 1, 0, 0);
9167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9168 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9171 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9172 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9173 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9174 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9175 IEM_MC_END();
9176}
9177
9178
/** Opcode 0xd9 !11/6 - fnstenv: store the FPU environment (14 or 28 bytes
 *  depending on the effective operand size); implemented in C. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Read-only actualization: fnstenv only reads the FPU state here; the
       C worker handles any state changes it needs. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9195
9196
/** Opcode 0xd9 !11/7 - fnstcw m2byte: store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9213
9214
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. FNOP: no operation except FPU
 *  state actualization and opcode/IP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9230
9231
/** Opcode 0xd9 11/0 stN - fld stN: push a copy of STn onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Copy STn into a result struct and push it; empty STn underflows. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9256
9257
/** Opcode 0xd9 11/3 stN - fxch stN: exchange ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Fast path: both registers occupied - swap via the result struct,
       clearing C1 in the process (X86_FSW_C1 mask).  The underflow case
       (either register empty) is punted to a C worker. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9286
9287
/** Opcode 0xd9 11/4, 0xdd 11/2 - fstp st0,stN: copy ST0 to STn and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no data movement needed, just pop
           (or report underflow on ST0 if it is empty). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 into STn, then pop the stack. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9334
9335
9336/**
9337 * Common worker for FPU instructions working on ST0 and replaces it with the
9338 * result, i.e. unary operators.
9339 *
9340 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9341 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Unary operation: read ST0, store the result back into ST0; empty
       ST0 raises a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9363
9364
/** Opcode 0xd9 0xe0: fchs - negate ST0 (flip the sign bit). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9371
9372
/** Opcode 0xd9 0xe1: fabs - ST0 := |ST0| (clear the sign bit). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9379
9380
/** Opcode 0xd9 0xe4: ftst - compare ST0 against +0.0, setting C0/C2/C3. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only the FSW is updated; no register result is produced. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9404
9405
/** Opcode 0xd9 0xe5: fxam - classify ST0 into C0/C2/C3 (and C1 = sign). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 is referenced unconditionally (no not-empty check): fxam must
       also classify an empty register, so the worker sees it either way. */
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9426
9427
9428/**
9429 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9430 *
9431 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9432 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* The constant is pushed into ST7 (register below the top); if that
       slot is occupied we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9453
9454
/** Opcode 0xd9 0xe8: fld1 - push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
9461
9462
/** Opcode 0xd9 0xe9: fldl2t - push log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
9469
9470
/** Opcode 0xd9 0xea: fldl2e - push log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
9477
/** Opcode 0xd9 0xeb: fldpi - push pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
9484
9485
/** Opcode 0xd9 0xec: fldlg2 - push log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
9492
/** Opcode 0xd9 0xed: fldln2 - push ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
9499
9500
/** Opcode 0xd9 0xee: fldz - push +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9507
9508
/** Opcode 0xd9 0xf0: f2xm1 - ST0 := 2^ST0 - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9522
9523
9524/**
9525 * Common worker for FPU instructions working on STn and ST0, storing the result
9526 * in STn, and popping the stack unless IE, DE or ZE was raised.
9527 *
9528 * @param bRm Mod R/M byte.
9529 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9530 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: STn is the first (destination) operand and
       ST0 the second; the result is stored into STn before popping. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9554
9555
/** Opcode 0xd9 0xf1: fyl2x - ST1 := ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9562
9563
9564/**
9565 * Common worker for FPU instructions working on ST0 and having two outputs, one
9566 * replacing ST0 and one pushed onto the stack.
9567 *
9568 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9569 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Two-output unary op (e.g. fptan, fsincos): one result replaces ST0,
       the other is pushed on top of it. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9591
9592
/** Opcode 0xd9 0xf2: fptan - ST0 := tan(ST0), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
9599
9600
/** Opcode 0xd9 0xf3: fpatan - ST1 := arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9607
9608
/** Opcode 0xd9 0xf4: fxtract - split ST0 into exponent (replaces ST0) and
 *  significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9615
9616
/** Opcode 0xd9 0xf5: fprem1 - IEEE partial remainder, ST0 := ST0 rem ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9623
9624
/** Opcode 0xd9 0xf6: fdecstp - decrement the FPU stack top pointer (TOP). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Only TOP changes; no register content is touched and no tag bits
       are modified beyond the rotation implied by the new TOP. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9645
9646
/** Opcode 0xd9 0xf7 - fincstp: increment the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Rotate TOP up by one. */
    IEM_MC_FPU_STACK_INC_TOP();
    /* Write a zero FSW status (clears C0/C2/C3 per the note) and latch the FPU opcode. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9667
9668
/** Opcode 0xd9 0xf8 - fprem: ST(0) <- partial remainder of ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9675
9676
/** Opcode 0xd9 0xf9 - fyl2xp1: result goes to ST(1), then the stack is popped. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9683
9684
/** Opcode 0xd9 0xfa - fsqrt: unary op on ST(0), result stored back in ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9691
9692
/** Opcode 0xd9 0xfb - fsincos: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9699
9700
/** Opcode 0xd9 0xfc - frndint: unary op on ST(0), result stored back in ST(0). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9707
9708
/** Opcode 0xd9 0xfd - fscale: ST(0) scaled by ST(1), result stored in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9715
9716
/** Opcode 0xd9 0xfe - fsin: unary op on ST(0), result stored back in ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9723
9724
/** Opcode 0xd9 0xff - fcos: unary op on ST(0), result stored back in ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9731
9732
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form encodings 0xe0..0xff of the 0xd9
 * escape byte; indexed by (bRm - 0xe0).  Invalid encodings map to
 * iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9769
9770
9771/**
9772 * @opcode 0xd9
9773 */
9774FNIEMOP_DEF(iemOp_EscF1)
9775{
9776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9777 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9778
9779 if (IEM_IS_MODRM_REG_MODE(bRm))
9780 {
9781 switch (IEM_GET_MODRM_REG_8(bRm))
9782 {
9783 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9784 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9785 case 2:
9786 if (bRm == 0xd0)
9787 return FNIEMOP_CALL(iemOp_fnop);
9788 IEMOP_RAISE_INVALID_OPCODE_RET();
9789 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9790 case 4:
9791 case 5:
9792 case 6:
9793 case 7:
9794 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9795 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9797 }
9798 }
9799 else
9800 {
9801 switch (IEM_GET_MODRM_REG_8(bRm))
9802 {
9803 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9804 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9805 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9806 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9807 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9808 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9809 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9810 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9811 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9812 }
9813 }
9814}
9815
9816
/** Opcode 0xda 11/0 - fcmovb: conditional copy of ST(N) to ST(0) when CF=1. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9841
9842
/** Opcode 0xda 11/1 - fcmove: conditional copy of ST(N) to ST(0) when ZF=1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9867
9868
/** Opcode 0xda 11/2 - fcmovbe: conditional copy of ST(N) to ST(0) when CF=1 or ZF=1. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9893
9894
/** Opcode 0xda 11/3 - fcmovu: conditional copy of ST(N) to ST(0) when PF=1 (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9919
9920
9921/**
9922 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9923 * flags, and popping twice when done.
9924 *
9925 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9926 */
9927FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9928{
9929 IEM_MC_BEGIN(3, 1, 0, 0);
9930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9931 IEM_MC_LOCAL(uint16_t, u16Fsw);
9932 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9933 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9934 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9935
9936 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9937 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9938
9939 IEM_MC_PREPARE_FPU_USAGE();
9940 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9941 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9942 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9943 } IEM_MC_ELSE() {
9944 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
9945 } IEM_MC_ENDIF();
9946 IEM_MC_ADVANCE_RIP_AND_FINISH();
9947
9948 IEM_MC_END();
9949}
9950
9951
/** Opcode 0xda 0xe9 - fucompp: unordered compare ST(0) with ST(1), pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9958
9959
9960/**
9961 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9962 * the result in ST0.
9963 *
9964 * @param bRm Mod R/M byte.
9965 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9966 */
9967FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9968{
9969 IEM_MC_BEGIN(3, 3, 0, 0);
9970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9971 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9972 IEM_MC_LOCAL(int32_t, i32Val2);
9973 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9974 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9975 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9976
9977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9979
9980 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9981 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9982 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9983
9984 IEM_MC_PREPARE_FPU_USAGE();
9985 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9986 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
9987 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9988 } IEM_MC_ELSE() {
9989 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9990 } IEM_MC_ENDIF();
9991 IEM_MC_ADVANCE_RIP_AND_FINISH();
9992
9993 IEM_MC_END();
9994}
9995
9996
/** Opcode 0xda !11/0 - fiadd: ST(0) <- ST(0) + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10003
10004
/** Opcode 0xda !11/1 - fimul: ST(0) <- ST(0) * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10011
10012
/** Opcode 0xda !11/2 - ficom: compare ST(0) with m32i, FSW flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Commit FSW and record the memory operand in FPUDP/FPUDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no result register to write on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10044
10045
/** Opcode 0xda !11/3 - ficomp: compare ST(0) with m32i, FSW flags only, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same comparison worker as ficom; the pop is in the commit macro. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no result register to write on underflow; still pops. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10077
10078
/** Opcode 0xda !11/4 - fisub: ST(0) <- ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10085
10086
/** Opcode 0xda !11/5 - fisubr: ST(0) <- m32i - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10093
10094
/** Opcode 0xda !11/6 - fidiv: ST(0) <- ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10101
10102
/** Opcode 0xda !11/7 - fidivr: ST(0) <- m32i / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10109
10110
10111/**
10112 * @opcode 0xda
10113 */
10114FNIEMOP_DEF(iemOp_EscF2)
10115{
10116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10117 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10118 if (IEM_IS_MODRM_REG_MODE(bRm))
10119 {
10120 switch (IEM_GET_MODRM_REG_8(bRm))
10121 {
10122 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10123 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10124 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10125 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10126 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10127 case 5:
10128 if (bRm == 0xe9)
10129 return FNIEMOP_CALL(iemOp_fucompp);
10130 IEMOP_RAISE_INVALID_OPCODE_RET();
10131 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10132 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10134 }
10135 }
10136 else
10137 {
10138 switch (IEM_GET_MODRM_REG_8(bRm))
10139 {
10140 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10141 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10142 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10143 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10144 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10145 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10146 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10147 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10149 }
10150 }
10151}
10152
10153
/** Opcode 0xdb !11/0 - fild: load m32i, convert to r80 and push onto the stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current TOP; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10184
10185
/** Opcode 0xdb !11/1 - fisttp: store ST(0) to m32i with truncation, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF/#GP fires before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is FSW-aware: an unmasked exception in u16Fsw suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10219
10220
/** Opcode 0xdb !11/2 - fist: store ST(0) to m32i (rounded per FCW RC). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF/#GP fires before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit is FSW-aware: an unmasked exception in u16Fsw suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10254
10255
/** Opcode 0xdb !11/3 - fistp: store ST(0) to m32i (rounded per FCW RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF/#GP fires before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Same store worker as fist; the pop happens in the FSW commit macro. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10289
10290
/** Opcode 0xdb !11/5 - fld: load m80r and push it onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current TOP; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10321
10322
/** Opcode 0xdb !11/7 - fstp: store ST(0) to m80r, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte store with explicit 7-byte alignment mask (see cbAlign argument). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* The commit is FSW-aware: an unmasked exception in u16Fsw suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10356
10357
/** Opcode 0xdb 11/0 - fcmovnb: conditional copy of ST(N) to ST(0) when CF=0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10382
10383
/** Opcode 0xdb 11/1 - fcmovne: conditional copy of ST(N) to ST(0) when ZF=0. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10408
10409
/** Opcode 0xdb 11/2 - fcmovnbe: conditional copy of ST(N) to ST(0) when CF=0 and ZF=0. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10434
10435
/** Opcode 0xdb 11/3 - fcmovnu: conditional copy of ST(N) to ST(0) when PF=0 (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated whether or not the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10460
10461
/** Opcode 0xdb 0xe0 - fneni: 8087 interrupt-enable instruction, a NOP on
 *  later FPUs (only #NM can still be raised). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10472
10473
/** Opcode 0xdb 0xe1 - fndisi: 8087 interrupt-disable instruction, a NOP on
 *  later FPUs (only #NM can still be raised). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10484
10485
/** Opcode 0xdb 0xe2 - fnclex: clear FPU exception flags without checking for
 *  pending unmasked exceptions. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Clears the exception bits in FSW. */
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10498
10499
/** Opcode 0xdb 0xe3 - fninit: reinitialize the FPU; deferred to the C
 *  implementation with exception checking disabled (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10507
10508
/** Opcode 0xdb 0xe4 - fnsetpm: 80287 protected-mode switch, a NOP on later
 *  FPUs (only #NM can still be raised). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10519
10520
/** Opcode 0xdb 0xe5 - frstpm: 80287XL return-to-real-mode; raises \#UD here
 *  since newer CPUs treat the encoding as invalid (see the #if 0 below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10536
10537
/** Opcode 0xdb 11/5 - fucomi: unordered compare ST(0) with ST(N), setting EFLAGS. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Deferred to the C worker; the last argument packs the no-pop flag (0)
       together with the 16-bit FPU opcode. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10546
10547
10548/** Opcode 0xdb 11/6. */
10549FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10550{
10551 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10552 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10553 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
10554 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10555}
10556
10557
10558/**
10559 * @opcode 0xdb
10560 */
10561FNIEMOP_DEF(iemOp_EscF3)
10562{
10563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10564 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
10565 if (IEM_IS_MODRM_REG_MODE(bRm))
10566 {
10567 switch (IEM_GET_MODRM_REG_8(bRm))
10568 {
10569 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
10570 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
10571 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
10572 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
10573 case 4:
10574 switch (bRm)
10575 {
10576 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
10577 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
10578 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
10579 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
10580 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
10581 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
10582 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
10583 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
10584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10585 }
10586 break;
10587 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
10588 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
10589 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10591 }
10592 }
10593 else
10594 {
10595 switch (IEM_GET_MODRM_REG_8(bRm))
10596 {
10597 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
10598 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
10599 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
10600 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
10601 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10602 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10603 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10604 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10606 }
10607 }
10608}
10609
10610
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Raises \#NM when the FPU isn't available and \#MF on pending unmasked FPU
 * exceptions before touching the stack; on stack underflow (either register
 * empty) the underflow path updates FSW instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand order: ST(i) is value1 (also the destination), ST(0) is value2. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10641
10642
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - add ST(0) to ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10649
10650
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiply ST(i) by ST(0), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10657
10658
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reverse subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10665
10666
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtract ST(0) from ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10673
10674
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reverse divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10681
10682
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divide ST(i) by ST(0), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10689
10690
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Decodes the effective address, raises \#NM/\#MF as appropriate, fetches the
 * 64-bit real from memory, and either calls the assembly worker (ST(0)
 * non-empty) or takes the stack-underflow path.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        /* FDP/FDS get recorded along with the result for memory operands. */
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10725
10726
/** Opcode 0xdc !11/0.
 * FADD m64real - add a 64-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10733
10734
/** Opcode 0xdc !11/1.
 * FMUL m64real - multiply ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10741
10742
/** Opcode 0xdc !11/2.
 * FCOM m64real - compare ST(0) with a 64-bit real memory operand; the only
 * architectural result is the updated FSW condition codes (no store, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register is modified on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10774
10775
/** Opcode 0xdc !11/3.
 * FCOMP m64real - same as FCOM m64real but pops ST(0) afterwards (note the
 * _THEN_POP variants of the FSW update / underflow macros). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10807
10808
/** Opcode 0xdc !11/4.
 * FSUB m64real - subtract a 64-bit real memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10815
10816
/** Opcode 0xdc !11/5.
 * FSUBR m64real - reverse subtract: ST(0) = m64real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10823
10824
/** Opcode 0xdc !11/6.
 * FDIV m64real - divide ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10831
10832
/** Opcode 0xdc !11/7.
 * FDIVR m64real - reverse divide: ST(0) = m64real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10839
10840
/**
 * @opcode 0xdc
 *
 * Decoder for the 0xdc FPU escape byte: register forms operate on ST(i) with
 * ST(0) as the second operand; memory forms use a 64-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the 11-bit FPU opcode for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10879
10880
/** Opcode 0xdd !11/0.
 * FLD m64real - convert a 64-bit real memory operand to 80-bit and push it
 * onto the FPU stack; pushing onto a non-empty ST(7) slot raises stack
 * overflow instead.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10911
10912
/** Opcode 0xdd !11/1.
 * FISTTP m64int - store ST(0) to memory as a 64-bit integer using truncation
 * (SSE3), then pop.  The destination is mapped for writing up front; on
 * underflow with IM masked the integer-indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit is conditional on FSW: a pending unmasked exception suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Invalid-operation masked: write the integer indefinite value. */
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10946
10947
/** Opcode 0xdd !11/2.
 * FST m64real - store ST(0) to memory as a 64-bit real, no pop.  On underflow
 * with IM masked a negative QNaN (real indefinite) is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        /* Commit is conditional on FSW: a pending unmasked exception suppresses the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10981
10982
10983
10984
/** Opcode 0xdd !11/3.
 * FSTP m64real - same as FST m64real but pops ST(0) afterwards (note the
 * _THEN_POP variants of the FSW update / underflow macros). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Invalid-operation masked: store the real indefinite (negative QNaN). */
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11018
11019
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the whole FPU state from memory; the layout
 * (94 vs 108 bytes) depends on the effective operand size, hence it is passed
 * down to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11036
11037
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the whole FPU state to memory and then
 * re-initialize the FPU (implicit FNINIT); no pending-exception check. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11054
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to a 16-bit memory operand; does not
 * check for pending FPU exceptions (the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11078
11079
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the tag for ST(i) as empty without changing TOP or the
 * register contents. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* FOP/FIP still get recorded even though no value is produced. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11099
11100
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i); underflow path taken when ST(0) is
 * empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST(0) value in a result structure with a zero FSW delta. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11123
11124
/** Opcode 0xdd 11/4.
 * FUCOM ST(i) - unordered compare of ST(0) with ST(i); only FSW condition
 * codes change, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11131
11132
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i) - unordered compare of ST(0) with ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11139
11140
/**
 * @opcode 0xdd
 *
 * Decoder for the 0xdd FPU escape byte: register forms are FFREE/FST/FSTP/
 * FUCOM(P); memory forms are 64-bit real load/store, FISTTP, FRSTOR, FNSAVE
 * and FNSTSW m16.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the 11-bit FPU opcode for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11179
11180
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add ST(0) to ST(i), store in ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11187
11188
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply ST(i) by ST(0), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11195
11196
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11203
11204
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11211
11212
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract ST(0) from ST(i), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11219
11220
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11227
11228
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide ST(i) by ST(0), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11235
11236
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, raises \#NM/\#MF as appropriate, fetches the
 * signed 16-bit integer operand, and either calls the assembly worker (ST(0)
 * non-empty) or takes the stack-underflow path.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        /* Result always goes to ST(0) for the integer-operand forms. */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11272
11273
/** Opcode 0xde !11/0.
 * FIADD m16int - add a signed 16-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11280
11281
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiply ST(0) by a signed 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11288
11289
/** Opcode 0xde !11/2.
 * FICOM m16int - compare ST(0) with a signed 16-bit integer memory operand;
 * only FSW condition codes change, no store, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register is modified on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11321
11322
/** Opcode 0xde !11/3.
 * FICOMP m16int - same as FICOM m16int but pops ST(0) afterwards (note the
 * _THEN_POP variants of the FSW update / underflow macros). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11354
11355
/** Opcode 0xde !11/4.
 * FISUB m16int - subtract a signed 16-bit integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11362
11363
/** Opcode 0xde !11/5.
 * FISUBR m16int - reverse subtract: ST(0) = m16int - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11370
11371
/** Opcode 0xde !11/6.
 * FIDIV m16int - divide ST(0) by a signed 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11378
11379
/** Opcode 0xde !11/7.
 * FIDIVR m16int - reverse divide: ST(0) = m16int / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11386
11387
/**
 * @opcode 0xde
 *
 * Decoder for the 0xde FPU escape byte: register forms are the pop-variants
 * of the arithmetic instructions (plus FCOMPP at /3 0xd9); memory forms use a
 * signed 16-bit integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the 11-bit FPU opcode for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        /* Only 0xde 0xd9 is defined in the /3 group (FCOMPP). */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11428
11429
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp: tag ST(i) as
 * empty and then increment TOP (i.e. a pop without storing anything). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The 'fincstp' half of the assumed ffree+fincstp behavior. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11449
11450
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word into AX; no pending-exception check
 * (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11465
11466
11467/** Opcode 0xdf 11/5. */
11468FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11469{
11470 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11471 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11472 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11473 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11474}
11475
11476
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare setting ZF/PF/CF, then pop.
 * fUCmp=false selects the ordered (signalling on QNaN) compare; bit 31 of
 * the last parameter requests the pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11485
11486
/** Opcode 0xdf !11/0.
 * FILD m16i - load a signed 16-bit integer from memory, convert it to an
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing uses the register one below the current top, i.e. ST(7). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(7) occupied: signal a stack push overflow instead. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11517
11518
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3) - store ST(0) to memory as a signed 16-bit integer
 * using truncation (round toward zero, regardless of FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if the invalid-operation exception is masked, store the
           integer indefinite value; then raise stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11552
11553
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to memory as a signed 16-bit integer using the
 * current FCW rounding mode; does not pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* No pop variant here, hence IEM_MC_UPDATE_FSW_WITH_MEM_OP (not ..._THEN_POP). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store integer indefinite if #IA is masked, flag underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11587
11588
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a signed 16-bit integer using the
 * current FCW rounding mode, then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           raise stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11622
11623
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * an 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing uses the register one below the current top, i.e. ST(7). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11654
11655
/** Opcode 0xdf !11/5.
 * FILD m64i - load a signed 64-bit integer from memory, convert it to an
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing uses the register one below the current top, i.e. ST(7). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11686
11687
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - convert ST(0) to 80-bit packed BCD, store it to memory and
 * pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination needs the _EX mapping variant with an explicit
       size and alignment mask (7). */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store BCD indefinite if #IA is masked, then raise
           stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11721
11722
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a signed 64-bit integer using the
 * current FCW rounding mode, then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           raise stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11756
11757
/**
 * @opcode 0xdf
 *
 * FPU escape 0xdf dispatcher.  Register-form encodings (mod=11) select
 * ffreep/fxch/fstp/fnstsw/fucomip/fcomip; memory-form encodings select the
 * 16-bit/64-bit integer and packed BCD load/store instructions.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 bits of the escape byte + modrm). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)    /* only DF E0 is FNSTSW AX; the rest of /4 is invalid */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11798
11799
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb - decrement CX/ECX/RCX (per effective address size) and
 * take the short branch while the counter is non-zero and ZF is clear.
 * The counter decrement itself does not modify EFLAGS.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11850
11851
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb - decrement CX/ECX/RCX (per effective address size) and
 * take the short branch while the counter is non-zero and ZF is set.
 * The counter decrement itself does not modify EFLAGS.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11902
11903
/**
 * @opcode 0xe2
 *
 * LOOP Jb - decrement CX/ECX/RCX (per effective address size) and take the
 * short branch while the counter is non-zero.  The decrement does not
 * modify EFLAGS.  Contains a logging-only shortcut for tight LOOP $-2
 * busy-wait loops.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Shortcut only when verbose logging is on and the branch targets the
       instruction itself: zero the counter and fall through immediately. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular path: decrement the counter and branch while it is non-zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11994
11995
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb - short branch when the counter register selected by
 * the effective address size (CX/ECX/RCX) is zero.  No registers or flags
 * are modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Note the inverted branch structure: advance when non-zero, jump when zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12043
12044
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate I/O port into AL.
 * Deferred to iemCImpl_in; 0x80 in the last argument marks the immediate
 * port form (vs. the DX form). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12054
12055
/** Opcode 0xe5.
 * IN eAX,Ib - read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12066
12067
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12077
12078
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12089
12090
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32 - near relative call.  The immediate size follows the
 * effective operand size; in 64-bit mode a rel32 is fetched and
 * sign-extended to 64 bits.  Deferred to the C implementations since a
 * stack push plus RIP update is involved.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast to signed: the displacement is relative. */
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12121
12122
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32 - near relative jump.  The 32-bit and 64-bit operand
 * sizes share the rel32 path (the displacement is sign-extended by the
 * REL_JMP micro-op as needed).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT: /* deliberate fall-through: both use rel32 */
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12152
12153
/**
 * @opcode 0xea
 *
 * JMP ptr16:16 / ptr16:32 - direct far jump with an immediate
 * selector:offset pair.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation.
       The offset comes first in the instruction stream, then the selector. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
12174
12175
/**
 * @opcode 0xeb
 *
 * JMP rel8 - short relative jump, sign-extended displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12190
12191
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12200
12201
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12211
12212
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12221
12222
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port
 * in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12232
12233
/**
 * @opcode 0xf0
 *
 * LOCK prefix - records the prefix (unless LOCK is configured to be
 * disregarded via IEM_F_X86_DISREGARD_LOCK) and dispatches the next opcode
 * byte through the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12246
12247
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP - raises a \#DB via the generic software interrupt C
 * implementation (IEMINT_INT1 distinguishes it from INT 1).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12263
12264
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix - records the prefix and dispatches the next opcode
 * byte through the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12282
12283
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix - records the prefix and dispatches the next opcode
 * byte through the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12301
12302
/**
 * @opcode 0xf4
 *
 * HLT - halt the CPU; deferred to the C implementation, ending the current
 * translation block (IEM_CIMPL_F_END_TB).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
12312
12313
/**
 * @opcode 0xf5
 *
 * CMC - complement the carry flag; no other flags are affected.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12326
12327
/**
 * Body for 'inc/dec/not/neg Eb'.
 *
 * Expands to the register and memory forms of a unary byte operation.
 * The register form calls a_fnNormalU8 directly; the memory form maps the
 * operand read/write and calls a_fnNormalU8 or, when a LOCK prefix is
 * present, the atomic a_fnLockedU8 variant.  Both worker functions take
 * (uint8_t *pu8Dst, uint32_t *pEFlags).
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Locked form: identical except the atomic worker is called. */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
12388
12389
12390/**
12391 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
12392 */
12393#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12394 if (IEM_IS_MODRM_REG_MODE(bRm)) \
12395 { \
12396 /* \
12397 * Register target \
12398 */ \
12399 switch (pVCpu->iem.s.enmEffOpSize) \
12400 { \
12401 case IEMMODE_16BIT: \
12402 IEM_MC_BEGIN(2, 0, 0, 0); \
12403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12404 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12405 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12406 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12407 IEM_MC_REF_EFLAGS(pEFlags); \
12408 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12409 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12410 IEM_MC_END(); \
12411 break; \
12412 \
12413 case IEMMODE_32BIT: \
12414 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
12415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12416 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12417 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12418 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12419 IEM_MC_REF_EFLAGS(pEFlags); \
12420 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
12421 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
12422 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12423 IEM_MC_END(); \
12424 break; \
12425 \
12426 case IEMMODE_64BIT: \
12427 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
12428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12429 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12430 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12431 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12432 IEM_MC_REF_EFLAGS(pEFlags); \
12433 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
12434 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12435 IEM_MC_END(); \
12436 break; \
12437 \
12438 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12439 } \
12440 } \
12441 else \
12442 { \
12443 /* \
12444 * Memory target. \
12445 */ \
12446 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12447 { \
12448 switch (pVCpu->iem.s.enmEffOpSize) \
12449 { \
12450 case IEMMODE_16BIT: \
12451 IEM_MC_BEGIN(2, 3, 0, 0); \
12452 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12453 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12455 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12456 \
12457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12459 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12460 IEM_MC_FETCH_EFLAGS(EFlags); \
12461 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
12462 \
12463 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
12464 IEM_MC_COMMIT_EFLAGS(EFlags); \
12465 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12466 IEM_MC_END(); \
12467 break; \
12468 \
12469 case IEMMODE_32BIT: \
12470 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
12471 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12472 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12474 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12475 \
12476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12478 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12479 IEM_MC_FETCH_EFLAGS(EFlags); \
12480 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
12481 \
12482 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
12483 IEM_MC_COMMIT_EFLAGS(EFlags); \
12484 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12485 IEM_MC_END(); \
12486 break; \
12487 \
12488 case IEMMODE_64BIT: \
12489 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
12490 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12491 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12493 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12494 \
12495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12497 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12498 IEM_MC_FETCH_EFLAGS(EFlags); \
12499 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
12500 \
12501 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
12502 IEM_MC_COMMIT_EFLAGS(EFlags); \
12503 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12504 IEM_MC_END(); \
12505 break; \
12506 \
12507 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12508 } \
12509 } \
12510 else \
12511 { \
12512 (void)0
12513
/**
 * Body for the locked variant of 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Completes the 'else' scope left open by IEMOP_BODY_UNARY_Ev (note the two
 * closing braces at the bottom), so it must directly follow an invocation of
 * that macro.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12579
12580
12581/**
12582 * @opmaps grp3_f6
12583 * @opcode /0
12584 * @todo also /1
12585 */
12586FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
12587{
12588 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
12589 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12590
12591 if (IEM_IS_MODRM_REG_MODE(bRm))
12592 {
12593 /* register access */
12594 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12595 IEM_MC_BEGIN(3, 0, 0, 0);
12596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12597 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12598 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
12599 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12600 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12601 IEM_MC_REF_EFLAGS(pEFlags);
12602 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12603 IEM_MC_ADVANCE_RIP_AND_FINISH();
12604 IEM_MC_END();
12605 }
12606 else
12607 {
12608 /* memory access. */
12609 IEM_MC_BEGIN(3, 3, 0, 0);
12610 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
12611 IEM_MC_ARG(uint8_t, u8Src, 1);
12612 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
12613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12614 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12615
12616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12617 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12618 IEM_MC_ASSIGN(u8Src, u8Imm);
12619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12620 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12621 IEM_MC_FETCH_EFLAGS(EFlags);
12622 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12623
12624 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
12625 IEM_MC_COMMIT_EFLAGS(EFlags);
12626 IEM_MC_ADVANCE_RIP_AND_FINISH();
12627 IEM_MC_END();
12628 }
12629}
12630
12631
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for byte-sized mul/imul/div/idiv.  The assembly helper
 * (pfnU8) operates on AX in-place and returns non-zero on failure, in which
 * case a \#DE (divide error) is raised instead of advancing RIP.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12682
12683
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for word/dword/qword-sized mul/imul/div/idiv.  The helpers
 * take the AX/DX (or EAX/EDX, RAX/RDX) register pair by reference as both
 * input and output; a non-zero return code raises \#DE instead of advancing
 * RIP.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Only on success: 32-bit GPR writes zero bits 63:32 in 64-bit mode. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Only on success: 32-bit GPR writes zero bits 63:32 in 64-bit mode. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12854
12855
12856/**
12857 * @opmaps grp3_f6
12858 * @opcode /2
12859 */
12860FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12861{
12862 IEMOP_MNEMONIC(not_Eb, "not Eb");
12863 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12864}
12865
12866
12867/**
12868 * @opmaps grp3_f6
12869 * @opcode /3
12870 */
12871FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12872{
12873 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12874 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12875}
12876
12877
12878/**
12879 * @opcode 0xf6
12880 */
12881FNIEMOP_DEF(iemOp_Grp3_Eb)
12882{
12883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12884 switch (IEM_GET_MODRM_REG_8(bRm))
12885 {
12886 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12887 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12888 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12889 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12890 case 4:
12891 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12892 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12893 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12894 case 5:
12895 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12896 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12897 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12898 case 6:
12899 IEMOP_MNEMONIC(div_Eb, "div Eb");
12900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12901 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12902 case 7:
12903 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12904 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12905 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12907 }
12908}
12909
12910
/** Opcode 0xf7 /0.
 *
 * 'test Ev,Iv' - AND the operand with an immediate, update flags only
 * (no result written back; memory operand is mapped read-only).  In 64-bit
 * mode the immediate is a sign-extended 32-bit value.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t const *, pu16Dst,           0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                /* cbImm=2: a two byte immediate follows the effective address bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t const *, pu32Dst,           0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                /* cbImm=4: a four byte immediate follows the effective address bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t const *, pu64Dst,           0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                /* cbImm=4: the immediate is 32 bits, sign-extended to 64 below. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13043
13044
/** Opcode 0xf7 /2.
 *
 * 'not Ev' - the two body macros below form one statement and must stay
 * paired (the first leaves an open scope the second closes).
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13052
13053
/** Opcode 0xf7 /3.
 *
 * 'neg Ev' - the two body macros below form one statement and must stay
 * paired (the first leaves an open scope the second closes).
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13061
13062
13063/**
13064 * @opcode 0xf7
13065 */
13066FNIEMOP_DEF(iemOp_Grp3_Ev)
13067{
13068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13069 switch (IEM_GET_MODRM_REG_8(bRm))
13070 {
13071 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13072 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13073 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
13074 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
13075 case 4:
13076 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
13077 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13078 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
13079 case 5:
13080 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
13081 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13082 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
13083 case 6:
13084 IEMOP_MNEMONIC(div_Ev, "div Ev");
13085 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13086 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
13087 case 7:
13088 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
13089 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13090 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
13091 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13092 }
13093}
13094
13095
13096/**
13097 * @opcode 0xf8
13098 */
13099FNIEMOP_DEF(iemOp_clc)
13100{
13101 IEMOP_MNEMONIC(clc, "clc");
13102 IEM_MC_BEGIN(0, 0, 0, 0);
13103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13104 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
13105 IEM_MC_ADVANCE_RIP_AND_FINISH();
13106 IEM_MC_END();
13107}
13108
13109
13110/**
13111 * @opcode 0xf9
13112 */
13113FNIEMOP_DEF(iemOp_stc)
13114{
13115 IEMOP_MNEMONIC(stc, "stc");
13116 IEM_MC_BEGIN(0, 0, 0, 0);
13117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13118 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
13119 IEM_MC_ADVANCE_RIP_AND_FINISH();
13120 IEM_MC_END();
13121}
13122
13123
13124/**
13125 * @opcode 0xfa
13126 */
13127FNIEMOP_DEF(iemOp_cli)
13128{
13129 IEMOP_MNEMONIC(cli, "cli");
13130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13131 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
13132}
13133
13134
/**
 * @opcode 0xfb
 *
 * 'sti' - deferred to a C implementation; sets up the one-instruction
 * interrupt inhibition shadow and checks for pending IRQs afterwards.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, iemCImpl_sti);
}
13142
13143
13144/**
13145 * @opcode 0xfc
13146 */
13147FNIEMOP_DEF(iemOp_cld)
13148{
13149 IEMOP_MNEMONIC(cld, "cld");
13150 IEM_MC_BEGIN(0, 0, 0, 0);
13151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13152 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
13153 IEM_MC_ADVANCE_RIP_AND_FINISH();
13154 IEM_MC_END();
13155}
13156
13157
13158/**
13159 * @opcode 0xfd
13160 */
13161FNIEMOP_DEF(iemOp_std)
13162{
13163 IEMOP_MNEMONIC(std, "std");
13164 IEM_MC_BEGIN(0, 0, 0, 0);
13165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13166 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
13167 IEM_MC_ADVANCE_RIP_AND_FINISH();
13168 IEM_MC_END();
13169}
13170
13171
13172/**
13173 * @opmaps grp4
13174 * @opcode /0
13175 */
13176FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
13177{
13178 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
13179 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
13180}
13181
13182
13183/**
13184 * @opmaps grp4
13185 * @opcode /1
13186 */
13187FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
13188{
13189 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
13190 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
13191}
13192
13193
13194/**
13195 * @opcode 0xfe
13196 */
13197FNIEMOP_DEF(iemOp_Grp4)
13198{
13199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13200 switch (IEM_GET_MODRM_REG_8(bRm))
13201 {
13202 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
13203 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
13204 default:
13205 /** @todo is the eff-addr decoded? */
13206 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
13207 IEMOP_RAISE_INVALID_OPCODE_RET();
13208 }
13209}
13210
/** Opcode 0xff /0.
 *
 * 'inc Ev' - the two body macros below form one statement and must stay
 * paired (the first leaves an open scope the second closes).
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13218
13219
/** Opcode 0xff /1.
 *
 * 'dec Ev' - the two body macros below form one statement and must stay
 * paired (the first leaves an open scope the second closes).
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13227
13228
13229/**
13230 * Opcode 0xff /2.
13231 * @param bRm The RM byte.
13232 */
13233FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
13234{
13235 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
13236 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13237
13238 if (IEM_IS_MODRM_REG_MODE(bRm))
13239 {
13240 /* The new RIP is taken from a register. */
13241 switch (pVCpu->iem.s.enmEffOpSize)
13242 {
13243 case IEMMODE_16BIT:
13244 IEM_MC_BEGIN(1, 0, 0, 0);
13245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13246 IEM_MC_ARG(uint16_t, u16Target, 0);
13247 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13248 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
13249 IEM_MC_END();
13250 break;
13251
13252 case IEMMODE_32BIT:
13253 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
13254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13255 IEM_MC_ARG(uint32_t, u32Target, 0);
13256 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13257 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
13258 IEM_MC_END();
13259 break;
13260
13261 case IEMMODE_64BIT:
13262 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
13263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13264 IEM_MC_ARG(uint64_t, u64Target, 0);
13265 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13266 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
13267 IEM_MC_END();
13268 break;
13269
13270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13271 }
13272 }
13273 else
13274 {
13275 /* The new RIP is taken from a register. */
13276 switch (pVCpu->iem.s.enmEffOpSize)
13277 {
13278 case IEMMODE_16BIT:
13279 IEM_MC_BEGIN(1, 1, 0, 0);
13280 IEM_MC_ARG(uint16_t, u16Target, 0);
13281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13284 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13285 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
13286 IEM_MC_END();
13287 break;
13288
13289 case IEMMODE_32BIT:
13290 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
13291 IEM_MC_ARG(uint32_t, u32Target, 0);
13292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13295 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13296 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
13297 IEM_MC_END();
13298 break;
13299
13300 case IEMMODE_64BIT:
13301 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
13302 IEM_MC_ARG(uint64_t, u64Target, 0);
13303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13306 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13307 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
13308 IEM_MC_END();
13309 break;
13310
13311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13312 }
13313 }
13314}
13315
/**
 * Common body for the far indirect forms of group 5 (0xff /3 callf Ep and
 * /5 jmpf Ep): loads a far pointer (offset + 16-bit selector) from memory and
 * invokes @a a_fnCImpl with it.
 *
 * Register operands (mod=3) raise \#UD — there is no register encoding of a
 * far pointer.  In 64-bit mode the effective operand size is forced back to
 * 32-bit unless the guest CPU is Intel (only Intel honours REX.W here; VIA
 * behaviour is an open question, see the @todo).  The offset part is fetched
 * first, then the selector word at displacement 2/4/8 matching the 16/32/64-bit
 * operand size.  The CIMPL call is flagged as a far indirect branch that may
 * change mode, touch RFLAGS and cause a VM-exit (the 64-bit case omits the
 * RFLAGS/VMEXIT flags since no call gates exist in long mode per the inline
 * comment).
 *
 * @param   a_bRm       The ModR/M byte.
 * @param   a_fnCImpl   The C implementation worker (iemCImpl_callf or
 *                      iemCImpl_FarJmp).
 */
13316 #define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
13317 /* Registers? How?? */ \
13318 if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
13319 { /* likely */ } \
13320 else \
13321 IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
13322 \
13323 /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
13324 /** @todo what does VIA do? */ \
13325 if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
13326 { /* likely */ } \
13327 else \
13328 pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
13329 \
13330 /* Far pointer loaded from memory. */ \
13331 switch (pVCpu->iem.s.enmEffOpSize) \
13332 { \
13333 case IEMMODE_16BIT: \
13334 IEM_MC_BEGIN(3, 1, 0, 0); \
13335 IEM_MC_ARG(uint16_t, u16Sel, 0); \
13336 IEM_MC_ARG(uint16_t, offSeg, 1); \
13337 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
13338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
13339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
13340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13341 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
13342 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
13343 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
13344 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
13345 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
13346 IEM_MC_END(); \
13347 break; \
13348 \
13349 case IEMMODE_32BIT: \
13350 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
13351 IEM_MC_ARG(uint16_t, u16Sel, 0); \
13352 IEM_MC_ARG(uint32_t, offSeg, 1); \
13353 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
13354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
13355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
13356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13357 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
13358 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
13359 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
13360 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
13361 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
13362 IEM_MC_END(); \
13363 break; \
13364 \
13365 case IEMMODE_64BIT: \
13366 Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
13367 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
13368 IEM_MC_ARG(uint16_t, u16Sel, 0); \
13369 IEM_MC_ARG(uint64_t, offSeg, 1); \
13370 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
13371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
13372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
13373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13374 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
13375 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
13376 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
13377 a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
13378 IEM_MC_END(); \
13379 break; \
13380 \
13381 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13382 } do {} while (0)
13383
13384
13385/**
13386 * Opcode 0xff /3.
 *
 * Far indirect CALL (callf Ep): decodes a seg:off far pointer from memory and
 * hands it to iemCImpl_callf via the shared IEMOP_BODY_GRP5_FAR_EP body.
 * Register operands raise \#UD inside that body.
 *
13387 * @param bRm The RM byte.
13388 */
13389FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13390{
13391 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
13392 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
13393}
13394
13395
13396/**
13397 * Opcode 0xff /4.
 *
 * Near indirect JMP (jmpn Ev).  The new IP/EIP/RIP is taken either from a
 * general-purpose register (mod=3) or from memory, sized by the effective
 * operand size, and installed with IEM_MC_SET_RIP_U16/U32/U64_AND_FINISH.
 * In 64-bit mode the operand size defaults to 64-bit, and Intel CPUs ignore
 * the operand-size prefix here (see the helper macro invoked first).
 *
13398 * @param bRm The RM byte.
13399 */
13400FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
13401{
13402 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
13403 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13404
13405 if (IEM_IS_MODRM_REG_MODE(bRm))
13406 {
13407 /* The new RIP is taken from a register. */
13408 switch (pVCpu->iem.s.enmEffOpSize)
13409 {
13410 case IEMMODE_16BIT:
13411 IEM_MC_BEGIN(0, 1, 0, 0);
13412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13413 IEM_MC_LOCAL(uint16_t, u16Target);
13414 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13415 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13416 IEM_MC_END();
13417 break;
13418
13419 case IEMMODE_32BIT:
13420 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
13421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13422 IEM_MC_LOCAL(uint32_t, u32Target);
13423 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13424 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13425 IEM_MC_END();
13426 break;
13427
13428 case IEMMODE_64BIT:
13429 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
13430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13431 IEM_MC_LOCAL(uint64_t, u64Target);
13432 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13433 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13434 IEM_MC_END();
13435 break;
13436
13437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13438 }
13439 }
13440 else
13441 {
 /* Note the decode order: effective address calculation consumes any
    remaining opcode bytes (disp/SIB) before DONE_DECODING is asserted. */
13442 /* The new RIP is taken from a memory location. */
13443 switch (pVCpu->iem.s.enmEffOpSize)
13444 {
13445 case IEMMODE_16BIT:
13446 IEM_MC_BEGIN(0, 2, 0, 0);
13447 IEM_MC_LOCAL(uint16_t, u16Target);
13448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13451 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13452 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13453 IEM_MC_END();
13454 break;
13455
13456 case IEMMODE_32BIT:
13457 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
13458 IEM_MC_LOCAL(uint32_t, u32Target);
13459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13462 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13463 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13464 IEM_MC_END();
13465 break;
13466
13467 case IEMMODE_64BIT:
13468 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
13469 IEM_MC_LOCAL(uint64_t, u64Target);
13470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13473 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13474 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13475 IEM_MC_END();
13476 break;
13477
13478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13479 }
13480 }
13481}
13482
13483
13484/**
13485 * Opcode 0xff /5.
 *
 * Far indirect JMP (jmpf Ep): decodes a seg:off far pointer from memory and
 * hands it to iemCImpl_FarJmp via the shared IEMOP_BODY_GRP5_FAR_EP body.
 * Register operands raise \#UD inside that body.
 *
13486 * @param bRm The RM byte.
13487 */
13488FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
13489{
13490 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
13491 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
13492}
13493
13494
13495/**
13496 * Opcode 0xff /6.
 *
 * PUSH Ev.  Register operands are delegated to the common iemOpCommonPushGReg
 * worker; memory operands are handled here: the value is fetched from the
 * effective address and pushed with IEM_MC_PUSH_U16/U32/U64.  The operand
 * size defaults to 64-bit in long mode (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE),
 * which is why the 32-bit case is flagged IEM_MC_F_NOT_64BIT.
 *
13497 * @param bRm The RM byte.
13498 */
13499FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
13500{
13501 IEMOP_MNEMONIC(push_Ev, "push Ev");
13502
13503 /* Registers are handled by a common worker. */
13504 if (IEM_IS_MODRM_REG_MODE(bRm))
13505 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
13506
13507 /* Memory we do here. */
13508 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13509 switch (pVCpu->iem.s.enmEffOpSize)
13510 {
13511 case IEMMODE_16BIT:
13512 IEM_MC_BEGIN(0, 2, 0, 0);
13513 IEM_MC_LOCAL(uint16_t, u16Src);
13514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13517 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13518 IEM_MC_PUSH_U16(u16Src);
13519 IEM_MC_ADVANCE_RIP_AND_FINISH();
13520 IEM_MC_END();
13521 break;
13522
13523 case IEMMODE_32BIT:
13524 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
13525 IEM_MC_LOCAL(uint32_t, u32Src);
13526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13529 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13530 IEM_MC_PUSH_U32(u32Src);
13531 IEM_MC_ADVANCE_RIP_AND_FINISH();
13532 IEM_MC_END();
13533 break;
13534
13535 case IEMMODE_64BIT:
13536 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
13537 IEM_MC_LOCAL(uint64_t, u64Src);
13538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13541 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13542 IEM_MC_PUSH_U64(u64Src);
13543 IEM_MC_ADVANCE_RIP_AND_FINISH();
13544 IEM_MC_END();
13545 break;
13546
13547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13548 }
13549}
13550
13551
13552/**
13553 * @opcode 0xff
13554 */
13555FNIEMOP_DEF(iemOp_Grp5)
13556{
13557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13558 switch (IEM_GET_MODRM_REG_8(bRm))
13559 {
13560 case 0:
13561 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13562 case 1:
13563 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13564 case 2:
13565 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13566 case 3:
13567 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13568 case 4:
13569 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13570 case 5:
13571 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13572 case 6:
13573 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13574 case 7:
13575 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13576 IEMOP_RAISE_INVALID_OPCODE_RET();
13577 }
13578 AssertFailedReturn(VERR_IEM_IPE_3);
13579}
13580
13581
13582
/**
 * The one-byte opcode dispatch table, indexed directly by the opcode byte
 * (0x00..0xff).  Prefix bytes (segment overrides, operand/address size, lock,
 * rep/repne) and the 0x0f two-byte escape are dispatched through their own
 * table entries (iemOp_seg_*, iemOp_op_size, iemOp_lock, iemOp_2byteEscape,
 * etc.), as are the instruction-group decoders (iemOp_Grp2..Grp5, Grp11).
 */
13583const PFNIEMOP g_apfnOneByteMap[256] =
13584{
13585 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
13586 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
13587 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
13588 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
13589 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
13590 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
13591 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
13592 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
13593 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
13594 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
13595 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
13596 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
13597 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
13598 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
13599 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
13600 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
13601 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
13602 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
13603 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
13604 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
13605 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
13606 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
13607 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
13608 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
13609 /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
13610 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
13611 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
13612 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
13613 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
13614 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
13615 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
13616 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
13617 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
13618 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
13619 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
13620 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
13621 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
13622 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
13623 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
13624 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
13625 /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
13626 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
13627 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
13628 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
13629 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
13630 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
13631 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
13632 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
13633 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
13634 /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
13635 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
13636 /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
13637 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
13638 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
13639 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
13640 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
13641 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
13642 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
13643 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
13644 /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
13645 /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
13646 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
13647 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
13648 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
13649};
13650
13651
13652/** @} */
13653
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette