VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 100651

最後變更 在這個檔案從100651是 100623,由 vboxsync 提交於 20 月 前

VMM/IEM: pop [mem] correction, iEffSeg must be fetched after address decoding. Fixes regression from r157932. bugref:10369

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Author Date Id Revision
檔案大小: 450.1 KB
 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 100623 2023-07-18 09:58:30Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** Dispatch table mapping each one-byte opcode to its decoder function.
 * Defined later in this file; declared here so earlier code can reference it. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Note! This macro is deliberately left with unbalanced braces: it opens the
 *       lock-prefixed 'else' branch which one of the two companion macros
 *       above must close.
 *
 * @param   a_fnNormalU8    Assembly-level worker for the non-locked byte op.
 * @param   a_fRW           IEM_ACCESS_XXX memory access mode (RW for ops that
 *                          write the destination, R for TEST/CMP).
 */
#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/** Companion tail for IEMOP_BODY_BINARY_rm_r8: closes the open braces and
 *  rejects a LOCK prefix (for byte ops with no locked variant). */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
125
/** Companion tail for IEMOP_BODY_BINARY_rm_r8: closes the open braces and
 *  emits the LOCK-prefixed memory path using the atomic worker.
 *  @param a_fnLockedU8  Assembly-level worker for the locked byte op. */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
147
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * The register destination means no LOCK prefix is valid here, and the memory
 * operand (if any) is only read, so no mapping/commit dance is needed.
 *
 * @param   a_fnNormalU8    Assembly-level worker for the byte op.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
196
197
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Like IEMOP_BODY_BINARY_rm_r8, this is deliberately left with unbalanced
 * braces; complete it with IEMOP_BODY_BINARY_rm_rv_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * @param   a_fnNormalU16   Assembly-level worker, 16-bit operand size.
 * @param   a_fnNormalU32   Assembly-level worker, 32-bit operand size.
 * @param   a_fnNormalU64   Assembly-level worker, 64-bit operand size.
 * @param   a_fRW           IEM_ACCESS_XXX memory access mode; also used to
 *                          tell TEST/CMP (read-only) apart from writers for
 *                          the high-dword clearing in 32-bit register mode.
 */
#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
340
/** Companion tail for IEMOP_BODY_BINARY_rm_rv: closes the open braces and
 *  rejects a LOCK prefix (for ops with no locked variant, e.g. TEST/CMP). */
#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
347
/** Companion tail for IEMOP_BODY_BINARY_rm_rv: closes the open braces and
 *  emits the LOCK-prefixed memory path using the atomic workers.
 *  @param a_fnLockedU16  Atomic worker, 16-bit operand size.
 *  @param a_fnLockedU32  Atomic worker, 32-bit operand size.
 *  @param a_fnLockedU64  Atomic worker, 64-bit operand size. */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    /* Always RW here: locked variants never apply to TEST/CMP. */ \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
416
417
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * No ModR/M byte: destination is fixed to AL, source is an imm8.
 *
 * @param   a_fnNormalU8    Assembly-level worker for the byte op.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
437
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the imm32 is sign-extended to 64 bits (see
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 below).
 *
 * @param   a_fnNormalU16       Assembly-level worker, 16-bit operand size.
 * @param   a_fnNormalU32       Assembly-level worker, 32-bit operand size.
 * @param   a_fnNormalU64       Assembly-level worker, 64-bit operand size.
 * @param   a_fModifiesDstReg   Non-zero when the op writes rAX; controls the
 *                              high-dword clearing for 32-bit operand size
 *                              (pass 0 for TEST/CMP style read-only ops).
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
504
505
506
507/* Instruction specification format - work in progress: */
508
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 - r/m8 destination; locked variant used when LOCK+memory. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
531
532
/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64 - r/m destination; locked variant for LOCK+memory. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
548
549
/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
561
562
/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64 - register destination (last arg: modifies it). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
574
575
/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
587
588
/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, imm16/32 (imm32 sign-extended in 64-bit mode). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
603
604
/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
615
616
/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; deferred to C impl (can change mode state). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
628
629
/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8 - AF is architecturally undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
648
649
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64 - AF is architecturally undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
670
671
/**
 * @opcode 0x0a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 - register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
686
687
/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64 - register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
702
703
/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
718
719
/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32 (imm32 sign-extended in 64-bit mode). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
740
741
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
752
753
/**
 * @opcode 0x0f
 * @opmnemonic EscTwo0f
 * @openc two0f
 * @opdisenum OP_2B_ESC
 * @ophints harmless
 * @opgroup og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        /* Spot-check 0x0f 0xbc: BSF for all prefixes except F3h (TZCNT). */
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Dispatch into the two-byte map; 4 entries per opcode, selected by mandatory prefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_END_TB/*?*/,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
796
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - add with carry-in (CF tested as well as set). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
814
815
/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
833
834
/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
847
848
/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 - register destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
861
862
/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
875
876
/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32 (imm32 sign-extended in 64-bit mode). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
889
890
/**
 * @opcode 0x16
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
900
901
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; inhibits interrupts for one instruction
       (DISOPTYPE_INHIBIT_IRQS); deferred to C impl since it can change mode state. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
915
916
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - subtract with borrow-in (CF tested as well as set). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
929
930
/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
943
944
/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
956
957
/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 - register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
969
970
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
982
983
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32 (imm32 sign-extended in 64-bit mode). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
995
996
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1007
1008
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to C impl since it may change mode state. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1020
1021
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8; AF is undefined per spec, so excluded from verification. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1036
1037
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1052
1053
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1067
1068
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64; last arg 1 = destination is modified. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1082
1083
1084/**
1085 * @opcode 0x24
1086 * @opgroup og_gen_arith_bin
1087 * @opflmodify cf,pf,af,zf,sf,of
1088 * @opflundef af
1089 * @opflclear of,cf
1090 */
1091FNIEMOP_DEF(iemOp_and_Al_Ib)
1092{
1093 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1094 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1095 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1096}
1097
1098
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32 (sign-extended to 64-bit); last arg 1 = destination is modified. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1112
1113
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1132
1133
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode, deferred to C impl. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1148
1149
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1161
1162
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1174
1175
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1186
1187
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64; last arg 1 = destination is modified. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1198
1199
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1210
1211
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32 (sign-extended to 64-bit); last arg 1 = destination is modified. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1222
1223
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1242
1243
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode, deferred to C impl. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1258
1259
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8; AF is undefined per spec; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1274
1275
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1290
1291
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1305
1306
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64; last arg 1 = destination is modified. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1320
1321
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1335
1336
1337/**
1338 * @opcode 0x35
1339 * @opgroup og_gen_arith_bin
1340 * @opflmodify cf,pf,af,zf,sf,of
1341 * @opflundef af
1342 * @opflclear of,cf
1343 */
1344FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1345{
1346 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1347 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1348 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1349}
1350
1351
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1370
1371
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode, deferred to C impl.
       The @optest table above captures observed Intel/AMD divergence on undefined flags. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1419
1420
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8: read-only destination (flags only), so LOCK is invalid. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1430
1431
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64: read-only destination, LOCK invalid. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
1441
1442
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1451
1452
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64; last arg 0 = destination register not modified. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1461
1462
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1471
1472
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 (sign-extended); last arg 0 = rAX not modified. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1481
1482
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1495
1496
1497/**
1498 * @opcode 0x3f
1499 * @opfltest af,cf
1500 * @opflmodify cf,pf,af,zf,sf,of
1501 * @opflundef pf,zf,sf,of
1502 * @opgroup og_gen_arith_dec
1503 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1504 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1505 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1506 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1507 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1508 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1509 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1510 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1511 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1512 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1513 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1514 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1516 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1519 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1520 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1521 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1522 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1523 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1524 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1525 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1526 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1527 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1528 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1529 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1530 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1531 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1532 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1533 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1534 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1535 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1536 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1537 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1538 */
1539FNIEMOP_DEF(iemOp_aas)
1540{
1541 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1542 IEMOP_HLP_NO_64BIT();
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1545
1546 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1547}
1548
1549
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * References the target general register, applies the 16- or 32-bit unary
 * assembly worker on it together with EFLAGS, and advances RIP.  The 32-bit
 * case clears the high dword of the 64-bit register as usual.  There is
 * deliberately no 64-bit case: in 64-bit mode opcodes 0x40-0x4f are REX
 * prefixes and never reach this body.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1584
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1605
1606
/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eCX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1628
1629
/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1651
1652
1653
/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1676
1677
/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eSP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1699
1700
/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1723
1724
/**
 * @opcode 0x46
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eSI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1747
1748
/**
 * @opcode 0x47
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1772
1773
/**
 * @opcode 0x48
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1795
1796
/**
 * @opcode 0x49
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eCX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1819
1820
/**
 * @opcode 0x4a
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
1843
1844
/**
 * @opcode 0x4b
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
1868
1869
/**
 * @opcode 0x4c
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eSP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
1892
1893
/**
 * @opcode 0x4d
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
1917
1918
/**
 * @opcode 0x4e
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eSI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
1942
1943
/**
 * @opcode 0x4f
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC eDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
1968
1969
/**
 * Common 'push register' helper.
 *
 * In 64-bit code the REX.B bit extends the register index and the default
 * operand size is 64-bit (a 66h prefix selects 16-bit; there is no 32-bit
 * push in long mode).  Fetches the register value at the effective operand
 * size and pushes it on the stack.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2015
2016
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX (rR8 with REX.B). */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2025
2026
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX (rR9 with REX.B). */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2035
2036
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX (rR10 with REX.B). */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2045
2046
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX (rR11 with REX.B). */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2055
2056
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* The 8086 pushes the value of SP *after* the decrement (SP-2); all later
       CPUs push the pre-push value, handled by the common helper below. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2075
2076
2077/**
2078 * @opcode 0x55
2079 */
2080FNIEMOP_DEF(iemOp_push_eBP)
2081{
2082 IEMOP_MNEMONIC(push_rBP, "push rBP");
2083 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2084}
2085
2086
2087/**
2088 * @opcode 0x56
2089 */
2090FNIEMOP_DEF(iemOp_push_eSI)
2091{
2092 IEMOP_MNEMONIC(push_rSI, "push rSI");
2093 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2094}
2095
2096
2097/**
2098 * @opcode 0x57
2099 */
2100FNIEMOP_DEF(iemOp_push_eDI)
2101{
2102 IEMOP_MNEMONIC(push_rDI, "push rDI");
2103 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2104}
2105
2106
2107/**
2108 * Common 'pop register' helper.
2109 */
2110FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2111{
2112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2113 if (IEM_IS_64BIT_CODE(pVCpu))
2114 {
2115 iReg |= pVCpu->iem.s.uRexB;
2116 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2117 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2118 }
2119
2120 switch (pVCpu->iem.s.enmEffOpSize)
2121 {
2122 case IEMMODE_16BIT:
2123 IEM_MC_BEGIN(0, 1);
2124 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2125 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2126 IEM_MC_POP_U16(pu16Dst);
2127 IEM_MC_ADVANCE_RIP_AND_FINISH();
2128 IEM_MC_END();
2129 break;
2130
2131 case IEMMODE_32BIT:
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2134 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2135 IEM_MC_POP_U32(pu32Dst);
2136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
2137 IEM_MC_ADVANCE_RIP_AND_FINISH();
2138 IEM_MC_END();
2139 break;
2140
2141 case IEMMODE_64BIT:
2142 IEM_MC_BEGIN(0, 1);
2143 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2144 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2145 IEM_MC_POP_U64(pu64Dst);
2146 IEM_MC_ADVANCE_RIP_AND_FINISH();
2147 IEM_MC_END();
2148 break;
2149
2150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2151 }
2152}
2153
2154
2155/**
2156 * @opcode 0x58
2157 */
2158FNIEMOP_DEF(iemOp_pop_eAX)
2159{
2160 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2161 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2162}
2163
2164
2165/**
2166 * @opcode 0x59
2167 */
2168FNIEMOP_DEF(iemOp_pop_eCX)
2169{
2170 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2171 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2172}
2173
2174
2175/**
2176 * @opcode 0x5a
2177 */
2178FNIEMOP_DEF(iemOp_pop_eDX)
2179{
2180 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2181 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2182}
2183
2184
2185/**
2186 * @opcode 0x5b
2187 */
2188FNIEMOP_DEF(iemOp_pop_eBX)
2189{
2190 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2191 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2192}
2193
2194
2195/**
2196 * @opcode 0x5c
2197 */
2198FNIEMOP_DEF(iemOp_pop_eSP)
2199{
2200 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2201 if (IEM_IS_64BIT_CODE(pVCpu))
2202 {
2203 if (pVCpu->iem.s.uRexB)
2204 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2205 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2206 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2207 }
2208
2209 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2210 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2211 /** @todo add testcase for this instruction. */
2212 switch (pVCpu->iem.s.enmEffOpSize)
2213 {
2214 case IEMMODE_16BIT:
2215 IEM_MC_BEGIN(0, 1);
2216 IEM_MC_LOCAL(uint16_t, u16Dst);
2217 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2218 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 break;
2222
2223 case IEMMODE_32BIT:
2224 IEM_MC_BEGIN(0, 1);
2225 IEM_MC_LOCAL(uint32_t, u32Dst);
2226 IEM_MC_POP_U32(&u32Dst);
2227 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2228 IEM_MC_ADVANCE_RIP_AND_FINISH();
2229 IEM_MC_END();
2230 break;
2231
2232 case IEMMODE_64BIT:
2233 IEM_MC_BEGIN(0, 1);
2234 IEM_MC_LOCAL(uint64_t, u64Dst);
2235 IEM_MC_POP_U64(&u64Dst);
2236 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2237 IEM_MC_ADVANCE_RIP_AND_FINISH();
2238 IEM_MC_END();
2239 break;
2240
2241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2242 }
2243}
2244
2245
2246/**
2247 * @opcode 0x5d
2248 */
2249FNIEMOP_DEF(iemOp_pop_eBP)
2250{
2251 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2252 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2253}
2254
2255
2256/**
2257 * @opcode 0x5e
2258 */
2259FNIEMOP_DEF(iemOp_pop_eSI)
2260{
2261 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2262 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2263}
2264
2265
2266/**
2267 * @opcode 0x5f
2268 */
2269FNIEMOP_DEF(iemOp_pop_eDI)
2270{
2271 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2272 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2273}
2274
2275
2276/**
2277 * @opcode 0x60
2278 */
2279FNIEMOP_DEF(iemOp_pusha)
2280{
2281 IEMOP_MNEMONIC(pusha, "pusha");
2282 IEMOP_HLP_MIN_186();
2283 IEMOP_HLP_NO_64BIT();
2284 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2285 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2286 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2287 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2288}
2289
2290
2291/**
2292 * @opcode 0x61
2293 */
2294FNIEMOP_DEF(iemOp_popa__mvex)
2295{
2296 if (!IEM_IS_64BIT_CODE(pVCpu))
2297 {
2298 IEMOP_MNEMONIC(popa, "popa");
2299 IEMOP_HLP_MIN_186();
2300 IEMOP_HLP_NO_64BIT();
2301 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2302 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_16);
2303 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2304 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_32);
2305 }
2306 IEMOP_MNEMONIC(mvex, "mvex");
2307 Log(("mvex prefix is not supported!\n"));
2308 IEMOP_RAISE_INVALID_OPCODE_RET();
2309}
2310
2311
2312/**
2313 * @opcode 0x62
2314 * @opmnemonic bound
2315 * @op1 Gv_RO
2316 * @op2 Ma
2317 * @opmincpu 80186
2318 * @ophints harmless x86_invalid_64
2319 * @optest op1=0 op2=0 ->
2320 * @optest op1=1 op2=0 -> value.xcpt=5
2321 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2322 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2323 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2324 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2325 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2326 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2327 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2328 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2329 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2330 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2331 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2332 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2333 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2334 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2335 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2336 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2337 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2338 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2339 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2340 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2341 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2342 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2343 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2344 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2345 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2346 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2347 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2348 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2349 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2350 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2351 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2352 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2353 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2354 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2355 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2356 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2357 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2358 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2359 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2360 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2361 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2362 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2363 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  Fetch the index register and
               the two bounds from memory, then let the C implementation do
               the range check (raising #BR on failure). */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3 in 32-bit code: EVEX prefix candidate; requires AVX-512. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 is always the EVEX prefix (BOUND is invalid). */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then give up
       since EVEX encoded instructions aren't implemented here yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2451
2452
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjust the RPL field of the destination selector word to be at
 * least that of the source, setting ZF accordingly.  Protected mode only
 * (IEMOP_HLP_NO_REAL_OR_V86_MODE); the arithmetic lives in iemAImpl_arpl. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference it directly and update in place. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, apply arpl, then commit the
           mapping and the flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2500
2501
2502/**
2503 * @opcode 0x63
2504 *
2505 * @note This is a weird one. It works like a regular move instruction if
2506 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2507 * @todo This definitely needs a testcase to verify the odd cases. */
2508FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2509{
2510 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2511
2512 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2514
2515 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2516 {
2517 if (IEM_IS_MODRM_REG_MODE(bRm))
2518 {
2519 /*
2520 * Register to register.
2521 */
2522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2523 IEM_MC_BEGIN(0, 1);
2524 IEM_MC_LOCAL(uint64_t, u64Value);
2525 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2526 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2527 IEM_MC_ADVANCE_RIP_AND_FINISH();
2528 IEM_MC_END();
2529 }
2530 else
2531 {
2532 /*
2533 * We're loading a register from memory.
2534 */
2535 IEM_MC_BEGIN(0, 2);
2536 IEM_MC_LOCAL(uint64_t, u64Value);
2537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2540 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2541 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2542 IEM_MC_ADVANCE_RIP_AND_FINISH();
2543 IEM_MC_END();
2544 }
2545 }
2546 else
2547 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2548}
2549
2550
2551/**
2552 * @opcode 0x64
2553 * @opmnemonic segfs
2554 * @opmincpu 80386
2555 * @opgroup og_prefixes
2556 */
2557FNIEMOP_DEF(iemOp_seg_FS)
2558{
2559 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2560 IEMOP_HLP_MIN_386();
2561
2562 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2563 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2564
2565 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2566 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2567}
2568
2569
2570/**
2571 * @opcode 0x65
2572 * @opmnemonic seggs
2573 * @opmincpu 80386
2574 * @opgroup og_prefixes
2575 */
2576FNIEMOP_DEF(iemOp_seg_GS)
2577{
2578 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2579 IEMOP_HLP_MIN_386();
2580
2581 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2582 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2583
2584 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2585 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2586}
2587
2588
2589/**
2590 * @opcode 0x66
2591 * @opmnemonic opsize
2592 * @openc prefix
2593 * @opmincpu 80386
2594 * @ophints harmless
2595 * @opgroup og_prefixes
2596 */
2597FNIEMOP_DEF(iemOp_op_size)
2598{
2599 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2600 IEMOP_HLP_MIN_386();
2601
2602 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2603 iemRecalEffOpSize(pVCpu);
2604
2605 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2606 when REPZ or REPNZ are present. */
2607 if (pVCpu->iem.s.idxPrefix == 0)
2608 pVCpu->iem.s.idxPrefix = 1;
2609
2610 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2611 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2612}
2613
2614
2615/**
2616 * @opcode 0x67
2617 * @opmnemonic addrsize
2618 * @openc prefix
2619 * @opmincpu 80386
2620 * @ophints harmless
2621 * @opgroup og_prefixes
2622 */
2623FNIEMOP_DEF(iemOp_addr_size)
2624{
2625 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2626 IEMOP_HLP_MIN_386();
2627
2628 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2629 switch (pVCpu->iem.s.enmDefAddrMode)
2630 {
2631 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2632 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2633 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2634 default: AssertFailed();
2635 }
2636
2637 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2638 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2639}
2640
2641
2642/**
2643 * @opcode 0x68
2644 */
2645FNIEMOP_DEF(iemOp_push_Iz)
2646{
2647 IEMOP_MNEMONIC(push_Iz, "push Iz");
2648 IEMOP_HLP_MIN_186();
2649 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2650 switch (pVCpu->iem.s.enmEffOpSize)
2651 {
2652 case IEMMODE_16BIT:
2653 {
2654 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2656 IEM_MC_BEGIN(0,0);
2657 IEM_MC_PUSH_U16(u16Imm);
2658 IEM_MC_ADVANCE_RIP_AND_FINISH();
2659 IEM_MC_END();
2660 break;
2661 }
2662
2663 case IEMMODE_32BIT:
2664 {
2665 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2667 IEM_MC_BEGIN(0,0);
2668 IEM_MC_PUSH_U32(u32Imm);
2669 IEM_MC_ADVANCE_RIP_AND_FINISH();
2670 IEM_MC_END();
2671 break;
2672 }
2673
2674 case IEMMODE_64BIT:
2675 {
2676 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2678 IEM_MC_BEGIN(0,0);
2679 IEM_MC_PUSH_U64(u64Imm);
2680 IEM_MC_ADVANCE_RIP_AND_FINISH();
2681 IEM_MC_END();
2682 break;
2683 }
2684
2685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2686 }
2687}
2688
2689
2690/**
2691 * @opcode 0x69
2692 */
2693FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2694{
2695 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2696 IEMOP_HLP_MIN_186();
2697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2698 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2699
2700 switch (pVCpu->iem.s.enmEffOpSize)
2701 {
2702 case IEMMODE_16BIT:
2703 {
2704 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2705 if (IEM_IS_MODRM_REG_MODE(bRm))
2706 {
2707 /* register operand */
2708 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2710
2711 IEM_MC_BEGIN(3, 1);
2712 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2713 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2714 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2715 IEM_MC_LOCAL(uint16_t, u16Tmp);
2716
2717 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2718 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2719 IEM_MC_REF_EFLAGS(pEFlags);
2720 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2721 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2722
2723 IEM_MC_ADVANCE_RIP_AND_FINISH();
2724 IEM_MC_END();
2725 }
2726 else
2727 {
2728 /* memory operand */
2729 IEM_MC_BEGIN(3, 2);
2730 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2731 IEM_MC_ARG(uint16_t, u16Src, 1);
2732 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2733 IEM_MC_LOCAL(uint16_t, u16Tmp);
2734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2735
2736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2737 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2738 IEM_MC_ASSIGN(u16Src, u16Imm);
2739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2740 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2741 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2742 IEM_MC_REF_EFLAGS(pEFlags);
2743 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2744 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2745
2746 IEM_MC_ADVANCE_RIP_AND_FINISH();
2747 IEM_MC_END();
2748 }
2749 break;
2750 }
2751
2752 case IEMMODE_32BIT:
2753 {
2754 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2755 if (IEM_IS_MODRM_REG_MODE(bRm))
2756 {
2757 /* register operand */
2758 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2760
2761 IEM_MC_BEGIN(3, 1);
2762 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2763 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2764 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2765 IEM_MC_LOCAL(uint32_t, u32Tmp);
2766
2767 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2768 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2769 IEM_MC_REF_EFLAGS(pEFlags);
2770 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2771 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2772
2773 IEM_MC_ADVANCE_RIP_AND_FINISH();
2774 IEM_MC_END();
2775 }
2776 else
2777 {
2778 /* memory operand */
2779 IEM_MC_BEGIN(3, 2);
2780 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2781 IEM_MC_ARG(uint32_t, u32Src, 1);
2782 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2783 IEM_MC_LOCAL(uint32_t, u32Tmp);
2784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2785
2786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2787 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2788 IEM_MC_ASSIGN(u32Src, u32Imm);
2789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2790 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2791 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2792 IEM_MC_REF_EFLAGS(pEFlags);
2793 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2794 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2795
2796 IEM_MC_ADVANCE_RIP_AND_FINISH();
2797 IEM_MC_END();
2798 }
2799 break;
2800 }
2801
2802 case IEMMODE_64BIT:
2803 {
2804 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
2805 if (IEM_IS_MODRM_REG_MODE(bRm))
2806 {
2807 /* register operand */
2808 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2810
2811 IEM_MC_BEGIN(3, 1);
2812 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2813 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2814 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2815 IEM_MC_LOCAL(uint64_t, u64Tmp);
2816
2817 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2818 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2819 IEM_MC_REF_EFLAGS(pEFlags);
2820 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2821 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2822
2823 IEM_MC_ADVANCE_RIP_AND_FINISH();
2824 IEM_MC_END();
2825 }
2826 else
2827 {
2828 /* memory operand */
2829 IEM_MC_BEGIN(3, 2);
2830 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2831 IEM_MC_ARG(uint64_t, u64Src, 1);
2832 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2833 IEM_MC_LOCAL(uint64_t, u64Tmp);
2834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2835
2836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2837 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
2838 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
2839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2840 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2841 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2842 IEM_MC_REF_EFLAGS(pEFlags);
2843 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2844 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2845
2846 IEM_MC_ADVANCE_RIP_AND_FINISH();
2847 IEM_MC_END();
2848 }
2849 break;
2850 }
2851
2852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2853 }
2854}
2855
2856
2857/**
2858 * @opcode 0x6a
2859 */
2860FNIEMOP_DEF(iemOp_push_Ib)
2861{
2862 IEMOP_MNEMONIC(push_Ib, "push Ib");
2863 IEMOP_HLP_MIN_186();
2864 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2866 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2867
2868 switch (pVCpu->iem.s.enmEffOpSize)
2869 {
2870 case IEMMODE_16BIT:
2871 IEM_MC_BEGIN(0,0);
2872 IEM_MC_PUSH_U16(i8Imm);
2873 IEM_MC_ADVANCE_RIP_AND_FINISH();
2874 IEM_MC_END();
2875 break;
2876 case IEMMODE_32BIT:
2877 IEM_MC_BEGIN(0,0);
2878 IEM_MC_PUSH_U32(i8Imm);
2879 IEM_MC_ADVANCE_RIP_AND_FINISH();
2880 IEM_MC_END();
2881 break;
2882 case IEMMODE_64BIT:
2883 IEM_MC_BEGIN(0,0);
2884 IEM_MC_PUSH_U64(i8Imm);
2885 IEM_MC_ADVANCE_RIP_AND_FINISH();
2886 IEM_MC_END();
2887 break;
2888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2889 }
2890}
2891
2892
2893/**
2894 * @opcode 0x6b
2895 */
2896FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2897{
2898 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2899 IEMOP_HLP_MIN_186();
2900 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2901 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2902
2903 switch (pVCpu->iem.s.enmEffOpSize)
2904 {
2905 case IEMMODE_16BIT:
2906 {
2907 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2908 if (IEM_IS_MODRM_REG_MODE(bRm))
2909 {
2910 /* register operand */
2911 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913
2914 IEM_MC_BEGIN(3, 1);
2915 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2916 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2917 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2918 IEM_MC_LOCAL(uint16_t, u16Tmp);
2919
2920 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2921 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2922 IEM_MC_REF_EFLAGS(pEFlags);
2923 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2924 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2925
2926 IEM_MC_ADVANCE_RIP_AND_FINISH();
2927 IEM_MC_END();
2928 }
2929 else
2930 {
2931 /* memory operand */
2932 IEM_MC_BEGIN(3, 2);
2933 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2934 IEM_MC_ARG(uint16_t, u16Src, 1);
2935 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2936 IEM_MC_LOCAL(uint16_t, u16Tmp);
2937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2938
2939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2940 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2941 IEM_MC_ASSIGN(u16Src, u16Imm);
2942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2943 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2944 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2945 IEM_MC_REF_EFLAGS(pEFlags);
2946 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2947 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2948
2949 IEM_MC_ADVANCE_RIP_AND_FINISH();
2950 IEM_MC_END();
2951 }
2952 break;
2953 }
2954
2955 case IEMMODE_32BIT:
2956 {
2957 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2958 if (IEM_IS_MODRM_REG_MODE(bRm))
2959 {
2960 /* register operand */
2961 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963
2964 IEM_MC_BEGIN(3, 1);
2965 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2966 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2967 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2968 IEM_MC_LOCAL(uint32_t, u32Tmp);
2969
2970 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2971 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2972 IEM_MC_REF_EFLAGS(pEFlags);
2973 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2974 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2975
2976 IEM_MC_ADVANCE_RIP_AND_FINISH();
2977 IEM_MC_END();
2978 }
2979 else
2980 {
2981 /* memory operand */
2982 IEM_MC_BEGIN(3, 2);
2983 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2984 IEM_MC_ARG(uint32_t, u32Src, 1);
2985 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2986 IEM_MC_LOCAL(uint32_t, u32Tmp);
2987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2988
2989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2990 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2991 IEM_MC_ASSIGN(u32Src, u32Imm);
2992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2993 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2994 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2995 IEM_MC_REF_EFLAGS(pEFlags);
2996 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2997 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2998
2999 IEM_MC_ADVANCE_RIP_AND_FINISH();
3000 IEM_MC_END();
3001 }
3002 break;
3003 }
3004
3005 case IEMMODE_64BIT:
3006 {
3007 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3008 if (IEM_IS_MODRM_REG_MODE(bRm))
3009 {
3010 /* register operand */
3011 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3013
3014 IEM_MC_BEGIN(3, 1);
3015 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3016 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3017 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3018 IEM_MC_LOCAL(uint64_t, u64Tmp);
3019
3020 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3021 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3022 IEM_MC_REF_EFLAGS(pEFlags);
3023 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3024 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3025
3026 IEM_MC_ADVANCE_RIP_AND_FINISH();
3027 IEM_MC_END();
3028 }
3029 else
3030 {
3031 /* memory operand */
3032 IEM_MC_BEGIN(3, 2);
3033 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3034 IEM_MC_ARG(uint64_t, u64Src, 1);
3035 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3036 IEM_MC_LOCAL(uint64_t, u64Tmp);
3037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3038
3039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3040 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3041 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3043 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3044 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3045 IEM_MC_REF_EFLAGS(pEFlags);
3046 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3047 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3048
3049 IEM_MC_ADVANCE_RIP_AND_FINISH();
3050 IEM_MC_END();
3051 }
3052 break;
3053 }
3054
3055 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3056 }
3057}
3058
3059
3060/**
3061 * @opcode 0x6c
3062 */
3063FNIEMOP_DEF(iemOp_insb_Yb_DX)
3064{
3065 IEMOP_HLP_MIN_186();
3066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3067 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3068 {
3069 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3070 switch (pVCpu->iem.s.enmEffAddrMode)
3071 {
3072 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr16, false);
3073 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr32, false);
3074 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr64, false);
3075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3076 }
3077 }
3078 else
3079 {
3080 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3081 switch (pVCpu->iem.s.enmEffAddrMode)
3082 {
3083 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr16, false);
3084 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr32, false);
3085 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr64, false);
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3087 }
3088 }
3089}
3090
3091
3092/**
3093 * @opcode 0x6d
3094 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* INS word/dword: input from port DX to ES:[e/rDI].  Requires 186+; LOCK is invalid. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNE) select the repeating form here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        /* Dispatch on operand size, then address size; 64-bit operand size is
           treated as 32-bit (no 64-bit I/O port operand exists). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* macro supplies the default: label */
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* macro supplies the default: label */
        }
    }
}
3154
3155
3156/**
3157 * @opcode 0x6e
3158 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* OUTS byte: output DS:[e/rSI] (segment overridable, hence iEffSeg) to port DX.
       Requires 186+; LOCK is invalid. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNE) select the repeating form here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        /* Unlike INS, the effective source segment is passed down to the C worker. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3186
3187
3188/**
3189 * @opcode 0x6f
3190 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* OUTS word/dword: output DS:[e/rSI] (segment overridable) to port DX.
       Requires 186+; LOCK is invalid. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REP) and F2 (REPNE) select the repeating form here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        /* Dispatch on operand size, then address size; 64-bit operand size is
           treated as 32-bit (no 64-bit I/O port operand exists). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* macro supplies the default: label */
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* macro supplies the default: label */
        }
    }
}
3250
3251
3252/**
3253 * @opcode 0x70
3254 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* Jump short if overflow (OF=1). */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3270
3271
3272/**
3273 * @opcode 0x71
3274 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* Jump short if not overflow (OF=0); tested inverted: fall through when OF is set. */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3290
3291/**
3292 * @opcode 0x72
3293 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* Jump short if carry/below (CF=1). */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3309
3310
3311/**
3312 * @opcode 0x73
3313 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* Jump short if not carry/not below (CF=0); tested inverted. */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3329
3330
3331/**
3332 * @opcode 0x74
3333 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* Jump short if equal/zero (ZF=1). */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3349
3350
3351/**
3352 * @opcode 0x75
3353 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* Jump short if not equal/not zero (ZF=0); tested inverted. */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3369
3370
3371/**
3372 * @opcode 0x76
3373 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* Jump short if below or equal (CF=1 or ZF=1). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3389
3390
3391/**
3392 * @opcode 0x77
3393 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* Jump short if above (CF=0 and ZF=0); tested inverted. */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3409
3410
3411/**
3412 * @opcode 0x78
3413 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* Jump short if sign (SF=1). */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3429
3430
3431/**
3432 * @opcode 0x79
3433 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* Jump short if not sign (SF=0); tested inverted. */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3449
3450
3451/**
3452 * @opcode 0x7a
3453 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* Jump short if parity even (PF=1). */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3469
3470
3471/**
3472 * @opcode 0x7b
3473 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* Jump short if parity odd (PF=0); tested inverted. */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3489
3490
3491/**
3492 * @opcode 0x7c
3493 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* Jump short if less, signed (SF != OF). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3509
3510
3511/**
3512 * @opcode 0x7d
3513 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* Jump short if greater or equal, signed (SF == OF); tested inverted. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3529
3530
3531/**
3532 * @opcode 0x7e
3533 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* Jump short if less or equal, signed (ZF=1 or SF != OF). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3549
3550
3551/**
3552 * @opcode 0x7f
3553 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* Jump short if greater, signed (ZF=0 and SF == OF); tested inverted. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* sign-extended 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; Intel ignores 66h here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3569
3570
3571/**
3572 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3573 * iemOp_Grp1_Eb_Ib_80.
3574 */
/*
 * Handles register and unlocked-memory targets.  Deliberately leaves the
 * lock-prefix else-branch (and two scopes) OPEN; it must be followed by
 * either IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK or IEMOP_BODY_BINARY_Eb_Ib_LOCKED,
 * which close them.  a_fRW selects R or RW mapping (CMP only reads).
 */
#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target: immediate byte follows the ModR/M byte */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target, no LOCK prefix */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            /* effective address first, then the trailing immediate byte */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3620
/* Closes IEMOP_BODY_BINARY_Eb_Ib for ops that do not permit LOCK (CMP): raise #UD. */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3627
/* Closes IEMOP_BODY_BINARY_Eb_Ib with the LOCK-prefixed memory variant,
   invoking the atomic worker a_fnLockedU8 on the RW-mapped byte. */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3650
3651
3652/**
3653 * @opmaps grp1_80,grp1_83
3654 * @opcode /0
3655 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    /* The two body macros pair up: the first leaves the LOCK branch open,
       the second closes it with the atomic worker. */
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3662
3663
3664/**
3665 * @opmaps grp1_80,grp1_83
3666 * @opcode /1
3667 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3674
3675
3676/**
3677 * @opmaps grp1_80,grp1_83
3678 * @opcode /2
3679 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
3686
3687
3688/**
3689 * @opmaps grp1_80,grp1_83
3690 * @opcode /3
3691 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
3698
3699
3700/**
3701 * @opmaps grp1_80,grp1_83
3702 * @opcode /4
3703 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
3710
3711
3712/**
3713 * @opmaps grp1_80,grp1_83
3714 * @opcode /5
3715 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
3722
3723
3724/**
3725 * @opmaps grp1_80,grp1_83
3726 * @opcode /6
3727 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
3734
3735
3736/**
3737 * @opmaps grp1_80,grp1_83
3738 * @opcode /7
3739 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    /* CMP only reads the destination (DATA_R) and never accepts LOCK,
       so the NO_LOCK closer raises #UD on a lock prefix. */
    IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
3746
3747
3748/**
3749 * @opcode 0x80
3750 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1 (opcode 0x80): the reg field of the ModR/M byte selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg is 3 bits, all values covered */
    }
}
3767
3768
3769/**
3770 * Body for a group 1 binary operator.
3771 */
/*
 * Handles register and unlocked-memory targets for Ev,Iz forms (16/32-bit
 * immediate; 64-bit uses a sign-extended 32-bit immediate).  Leaves the
 * lock-prefix else-branch OPEN for IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK or
 * IEMOP_BODY_BINARY_Ev_Iz_LOCKED to close.
 */
#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,               0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,               0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Writing a 32-bit register clears bits 63:32; skip for read-only ops (CMP). */ \
                if (a_fRW == IEM_ACCESS_DATA_RW) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                /* Iz is sign-extended from 32 bits in 64-bit operand mode. */ \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                /* NOTE(review): this case rejects LOCK here while the 16/32-bit register
                   cases use plain DONE_DECODING - confirm the asymmetry is intentional. */ \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,               0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target, no LOCK prefix */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,   u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    /* cbImm=2: the immediate follows the effective address bytes */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,   u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,   u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    /* NOTE(review): DONE_DECODING precedes the ASSIGN here, the reverse
                       of the 16/32-bit cases above - confirm the ordering is deliberate. */ \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
3919
/* Closes IEMOP_BODY_BINARY_Ev_Iz for ops that do not permit LOCK (CMP): raise #UD. */
#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3926
/* Closes IEMOP_BODY_BINARY_Ev_Iz with the LOCK-prefixed memory variants,
   dispatching on effective operand size to the atomic workers. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,   u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,   u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,   u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    /* Iz is sign-extended from 32 bits in 64-bit operand mode. */ \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4004
4005
4006/**
4007 * @opmaps grp1_81
4008 * @opcode /0
4009 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4016
4017
4018/**
4019 * @opmaps grp1_81
4020 * @opcode /1
4021 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4028
4029
4030/**
4031 * @opmaps grp1_81
4032 * @opcode /2
4033 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4040
4041
4042/**
4043 * @opmaps grp1_81
4044 * @opcode /3
4045 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4052
4053
4054/**
4055 * @opmaps grp1_81
4056 * @opcode /4
4057 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4064
4065
4066/**
4067 * @opmaps grp1_81
4068 * @opcode /5
4069 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Paired body macros: normal path + LOCK-prefixed atomic path. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4076
4077
4078/**
4079 * @opmaps grp1_81
4080 * @opcode /6
4081 */
4082FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4083{
4084 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4085 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
4086 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4087}
4088
4089
4090/**
4091 * @opmaps grp1_81
4092 * @opcode /7
4093 */
4094FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4095{
4096 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4097 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
4098 IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
4099}
4100
4101
4102/**
4103 * @opcode 0x81
4104 */
4105FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4106{
4107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4108 switch (IEM_GET_MODRM_REG_8(bRm))
4109 {
4110 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4111 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4112 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4113 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4114 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4115 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4116 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4117 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4119 }
4120}
4121
4122
4123/**
4124 * @opcode 0x82
4125 * @opmnemonic grp1_82
4126 * @opgroup og_groups
4127 */
4128FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4129{
4130 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4131 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4132}
4133
4134
4135/**
4136 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4137 * iemOp_Grp1_Ev_Ib.
4138 */
4139#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
4140 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4141 { \
4142 /* \
4143 * Register target \
4144 */ \
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4146 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4147 switch (pVCpu->iem.s.enmEffOpSize) \
4148 { \
4149 case IEMMODE_16BIT: \
4150 { \
4151 IEM_MC_BEGIN(3, 0); \
4152 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4153 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4154 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4155 \
4156 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4157 IEM_MC_REF_EFLAGS(pEFlags); \
4158 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4159 \
4160 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4161 IEM_MC_END(); \
4162 break; \
4163 } \
4164 \
4165 case IEMMODE_32BIT: \
4166 { \
4167 IEM_MC_BEGIN(3, 0); \
4168 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4169 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4170 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4171 \
4172 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4173 IEM_MC_REF_EFLAGS(pEFlags); \
4174 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4175 if ((a_fRW) != IEM_ACCESS_DATA_R) \
4176 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4177 \
4178 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4179 IEM_MC_END(); \
4180 break; \
4181 } \
4182 \
4183 case IEMMODE_64BIT: \
4184 { \
4185 IEM_MC_BEGIN(3, 0); \
4186 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4187 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4188 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4189 \
4190 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4191 IEM_MC_REF_EFLAGS(pEFlags); \
4192 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4193 \
4194 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4195 IEM_MC_END(); \
4196 break; \
4197 } \
4198 \
4199 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4200 } \
4201 } \
4202 else \
4203 { \
4204 /* \
4205 * Memory target. \
4206 */ \
4207 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4208 { \
4209 switch (pVCpu->iem.s.enmEffOpSize) \
4210 { \
4211 case IEMMODE_16BIT: \
4212 { \
4213 IEM_MC_BEGIN(3, 2); \
4214 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4215 IEM_MC_ARG(uint16_t, u16Src, 1); \
4216 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4218 \
4219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4220 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4221 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4222 IEMOP_HLP_DONE_DECODING(); \
4223 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4224 IEM_MC_FETCH_EFLAGS(EFlags); \
4225 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4226 \
4227 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
4228 IEM_MC_COMMIT_EFLAGS(EFlags); \
4229 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4230 IEM_MC_END(); \
4231 break; \
4232 } \
4233 \
4234 case IEMMODE_32BIT: \
4235 { \
4236 IEM_MC_BEGIN(3, 2); \
4237 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4238 IEM_MC_ARG(uint32_t, u32Src, 1); \
4239 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4241 \
4242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4243 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4244 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4245 IEMOP_HLP_DONE_DECODING(); \
4246 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4247 IEM_MC_FETCH_EFLAGS(EFlags); \
4248 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4249 \
4250 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
4251 IEM_MC_COMMIT_EFLAGS(EFlags); \
4252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4253 IEM_MC_END(); \
4254 break; \
4255 } \
4256 \
4257 case IEMMODE_64BIT: \
4258 { \
4259 IEM_MC_BEGIN(3, 2); \
4260 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4261 IEM_MC_ARG(uint64_t, u64Src, 1); \
4262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4264 \
4265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4266 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4267 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4268 IEMOP_HLP_DONE_DECODING(); \
4269 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4270 IEM_MC_FETCH_EFLAGS(EFlags); \
4271 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4272 \
4273 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
4274 IEM_MC_COMMIT_EFLAGS(EFlags); \
4275 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4276 IEM_MC_END(); \
4277 break; \
4278 } \
4279 \
4280 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4281 } \
4282 } \
4283 else \
4284 { \
4285 (void)0
4286
/**
 * Completes IEMOP_BODY_BINARY_Ev_Ib() for instructions without a locked
 * variant (e.g. CMP): a LOCK prefix on the memory form raises an
 * invalid-lock-prefix exception.  Also supplies the closing braces for the
 * scopes the body macro left open.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4293
/**
 * LOCK-prefixed memory-target tail for group 1 byte-immediate instructions.
 * Must directly follow IEMOP_BODY_BINARY_Ev_Ib(), whose open scopes it
 * closes.  Maps the destination read-write and calls the
 * a_fnLockedU16/U32/U64 worker; the byte immediate is sign-extended the
 * same way as in the unlocked path.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4371
4372/**
4373 * @opmaps grp1_83
4374 * @opcode /0
4375 */
4376FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4377{
4378 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4379 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
4380 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4381}
4382
4383
4384/**
4385 * @opmaps grp1_83
4386 * @opcode /1
4387 */
4388FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
4389{
4390 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
4391 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
4392 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4393}
4394
4395
4396/**
4397 * @opmaps grp1_83
4398 * @opcode /2
4399 */
4400FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
4401{
4402 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
4403 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
4404 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4405}
4406
4407
4408/**
4409 * @opmaps grp1_83
4410 * @opcode /3
4411 */
4412FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
4413{
4414 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
4415 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
4416 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4417}
4418
4419
4420/**
4421 * @opmaps grp1_83
4422 * @opcode /4
4423 */
4424FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
4425{
4426 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
4427 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
4428 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4429}
4430
4431
4432/**
4433 * @opmaps grp1_83
4434 * @opcode /5
4435 */
4436FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
4437{
4438 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
4439 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
4440 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4441}
4442
4443
4444/**
4445 * @opmaps grp1_83
4446 * @opcode /6
4447 */
4448FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
4449{
4450 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
4451 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
4452 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4453}
4454
4455
4456/**
4457 * @opmaps grp1_83
4458 * @opcode /7
4459 */
4460FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
4461{
4462 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
4463 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
4464 IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
4465}
4466
4467
4468/**
4469 * @opcode 0x83
4470 */
4471FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
4472{
4473 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
4474 to the 386 even if absent in the intel reference manuals and some
4475 3rd party opcode listings. */
4476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4477 switch (IEM_GET_MODRM_REG_8(bRm))
4478 {
4479 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
4480 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
4481 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
4482 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
4483 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
4484 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
4485 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
4486 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
4487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4488 }
4489}
4490
4491
4492/**
4493 * @opcode 0x84
4494 */
4495FNIEMOP_DEF(iemOp_test_Eb_Gb)
4496{
4497 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
4498 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4499 IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
4500 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
4501}
4502
4503
4504/**
4505 * @opcode 0x85
4506 */
4507FNIEMOP_DEF(iemOp_test_Ev_Gv)
4508{
4509 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
4510 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4511 IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
4512 IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
4513}
4514
4515
4516/**
4517 * @opcode 0x86
4518 */
4519FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
4520{
4521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4522 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
4523
4524 /*
4525 * If rm is denoting a register, no more instruction bytes.
4526 */
4527 if (IEM_IS_MODRM_REG_MODE(bRm))
4528 {
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4530
4531 IEM_MC_BEGIN(0, 2);
4532 IEM_MC_LOCAL(uint8_t, uTmp1);
4533 IEM_MC_LOCAL(uint8_t, uTmp2);
4534
4535 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4536 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4537 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4538 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4539
4540 IEM_MC_ADVANCE_RIP_AND_FINISH();
4541 IEM_MC_END();
4542 }
4543 else
4544 {
4545 /*
4546 * We're accessing memory.
4547 */
4548/** @todo the register must be committed separately! */
4549 IEM_MC_BEGIN(2, 2);
4550 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
4551 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
4552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4553
4554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4555 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4556 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4557 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4558 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
4559 else
4560 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
4561 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
4562
4563 IEM_MC_ADVANCE_RIP_AND_FINISH();
4564 IEM_MC_END();
4565 }
4566}
4567
4568
4569/**
4570 * @opcode 0x87
4571 */
4572FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
4573{
4574 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
4575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4576
4577 /*
4578 * If rm is denoting a register, no more instruction bytes.
4579 */
4580 if (IEM_IS_MODRM_REG_MODE(bRm))
4581 {
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4583
4584 switch (pVCpu->iem.s.enmEffOpSize)
4585 {
4586 case IEMMODE_16BIT:
4587 IEM_MC_BEGIN(0, 2);
4588 IEM_MC_LOCAL(uint16_t, uTmp1);
4589 IEM_MC_LOCAL(uint16_t, uTmp2);
4590
4591 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4592 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4593 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4594 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4595
4596 IEM_MC_ADVANCE_RIP_AND_FINISH();
4597 IEM_MC_END();
4598 break;
4599
4600 case IEMMODE_32BIT:
4601 IEM_MC_BEGIN(0, 2);
4602 IEM_MC_LOCAL(uint32_t, uTmp1);
4603 IEM_MC_LOCAL(uint32_t, uTmp2);
4604
4605 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4606 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4607 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4608 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4609
4610 IEM_MC_ADVANCE_RIP_AND_FINISH();
4611 IEM_MC_END();
4612 break;
4613
4614 case IEMMODE_64BIT:
4615 IEM_MC_BEGIN(0, 2);
4616 IEM_MC_LOCAL(uint64_t, uTmp1);
4617 IEM_MC_LOCAL(uint64_t, uTmp2);
4618
4619 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4620 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4621 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4622 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4623
4624 IEM_MC_ADVANCE_RIP_AND_FINISH();
4625 IEM_MC_END();
4626 break;
4627
4628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4629 }
4630 }
4631 else
4632 {
4633 /*
4634 * We're accessing memory.
4635 */
4636 switch (pVCpu->iem.s.enmEffOpSize)
4637 {
4638/** @todo the register must be committed separately! */
4639 case IEMMODE_16BIT:
4640 IEM_MC_BEGIN(2, 2);
4641 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
4642 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
4643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4644
4645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4646 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4647 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4648 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4649 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
4650 else
4651 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
4652 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
4653
4654 IEM_MC_ADVANCE_RIP_AND_FINISH();
4655 IEM_MC_END();
4656 break;
4657
4658 case IEMMODE_32BIT:
4659 IEM_MC_BEGIN(2, 2);
4660 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
4661 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
4662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4663
4664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4665 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4666 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4667 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4668 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
4669 else
4670 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
4671 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
4672
4673 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
4674 IEM_MC_ADVANCE_RIP_AND_FINISH();
4675 IEM_MC_END();
4676 break;
4677
4678 case IEMMODE_64BIT:
4679 IEM_MC_BEGIN(2, 2);
4680 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
4681 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
4682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4683
4684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4685 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4686 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4687 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4688 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
4689 else
4690 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
4691 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
4692
4693 IEM_MC_ADVANCE_RIP_AND_FINISH();
4694 IEM_MC_END();
4695 break;
4696
4697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4698 }
4699 }
4700}
4701
4702
4703/**
4704 * @opcode 0x88
4705 */
4706FNIEMOP_DEF(iemOp_mov_Eb_Gb)
4707{
4708 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
4709
4710 uint8_t bRm;
4711 IEM_OPCODE_GET_NEXT_U8(&bRm);
4712
4713 /*
4714 * If rm is denoting a register, no more instruction bytes.
4715 */
4716 if (IEM_IS_MODRM_REG_MODE(bRm))
4717 {
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4719 IEM_MC_BEGIN(0, 1);
4720 IEM_MC_LOCAL(uint8_t, u8Value);
4721 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4722 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
4723 IEM_MC_ADVANCE_RIP_AND_FINISH();
4724 IEM_MC_END();
4725 }
4726 else
4727 {
4728 /*
4729 * We're writing a register to memory.
4730 */
4731 IEM_MC_BEGIN(0, 2);
4732 IEM_MC_LOCAL(uint8_t, u8Value);
4733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4736 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4737 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
4738 IEM_MC_ADVANCE_RIP_AND_FINISH();
4739 IEM_MC_END();
4740 }
4741}
4742
4743
4744/**
4745 * @opcode 0x89
4746 */
4747FNIEMOP_DEF(iemOp_mov_Ev_Gv)
4748{
4749 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
4750
4751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4752
4753 /*
4754 * If rm is denoting a register, no more instruction bytes.
4755 */
4756 if (IEM_IS_MODRM_REG_MODE(bRm))
4757 {
4758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4759 switch (pVCpu->iem.s.enmEffOpSize)
4760 {
4761 case IEMMODE_16BIT:
4762 IEM_MC_BEGIN(0, 1);
4763 IEM_MC_LOCAL(uint16_t, u16Value);
4764 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4765 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4766 IEM_MC_ADVANCE_RIP_AND_FINISH();
4767 IEM_MC_END();
4768 break;
4769
4770 case IEMMODE_32BIT:
4771 IEM_MC_BEGIN(0, 1);
4772 IEM_MC_LOCAL(uint32_t, u32Value);
4773 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4774 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4775 IEM_MC_ADVANCE_RIP_AND_FINISH();
4776 IEM_MC_END();
4777 break;
4778
4779 case IEMMODE_64BIT:
4780 IEM_MC_BEGIN(0, 1);
4781 IEM_MC_LOCAL(uint64_t, u64Value);
4782 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4783 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4784 IEM_MC_ADVANCE_RIP_AND_FINISH();
4785 IEM_MC_END();
4786 break;
4787
4788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4789 }
4790 }
4791 else
4792 {
4793 /*
4794 * We're writing a register to memory.
4795 */
4796 switch (pVCpu->iem.s.enmEffOpSize)
4797 {
4798 case IEMMODE_16BIT:
4799 IEM_MC_BEGIN(0, 2);
4800 IEM_MC_LOCAL(uint16_t, u16Value);
4801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4805 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4806 IEM_MC_ADVANCE_RIP_AND_FINISH();
4807 IEM_MC_END();
4808 break;
4809
4810 case IEMMODE_32BIT:
4811 IEM_MC_BEGIN(0, 2);
4812 IEM_MC_LOCAL(uint32_t, u32Value);
4813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4816 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4817 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
4818 IEM_MC_ADVANCE_RIP_AND_FINISH();
4819 IEM_MC_END();
4820 break;
4821
4822 case IEMMODE_64BIT:
4823 IEM_MC_BEGIN(0, 2);
4824 IEM_MC_LOCAL(uint64_t, u64Value);
4825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4829 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
4830 IEM_MC_ADVANCE_RIP_AND_FINISH();
4831 IEM_MC_END();
4832 break;
4833
4834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4835 }
4836 }
4837}
4838
4839
4840/**
4841 * @opcode 0x8a
4842 */
4843FNIEMOP_DEF(iemOp_mov_Gb_Eb)
4844{
4845 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
4846
4847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4848
4849 /*
4850 * If rm is denoting a register, no more instruction bytes.
4851 */
4852 if (IEM_IS_MODRM_REG_MODE(bRm))
4853 {
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855 IEM_MC_BEGIN(0, 1);
4856 IEM_MC_LOCAL(uint8_t, u8Value);
4857 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4858 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4859 IEM_MC_ADVANCE_RIP_AND_FINISH();
4860 IEM_MC_END();
4861 }
4862 else
4863 {
4864 /*
4865 * We're loading a register from memory.
4866 */
4867 IEM_MC_BEGIN(0, 2);
4868 IEM_MC_LOCAL(uint8_t, u8Value);
4869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4873 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4874 IEM_MC_ADVANCE_RIP_AND_FINISH();
4875 IEM_MC_END();
4876 }
4877}
4878
4879
4880/**
4881 * @opcode 0x8b
4882 */
4883FNIEMOP_DEF(iemOp_mov_Gv_Ev)
4884{
4885 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
4886
4887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4888
4889 /*
4890 * If rm is denoting a register, no more instruction bytes.
4891 */
4892 if (IEM_IS_MODRM_REG_MODE(bRm))
4893 {
4894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4895 switch (pVCpu->iem.s.enmEffOpSize)
4896 {
4897 case IEMMODE_16BIT:
4898 IEM_MC_BEGIN(0, 1);
4899 IEM_MC_LOCAL(uint16_t, u16Value);
4900 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4901 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4902 IEM_MC_ADVANCE_RIP_AND_FINISH();
4903 IEM_MC_END();
4904 break;
4905
4906 case IEMMODE_32BIT:
4907 IEM_MC_BEGIN(0, 1);
4908 IEM_MC_LOCAL(uint32_t, u32Value);
4909 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4910 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4911 IEM_MC_ADVANCE_RIP_AND_FINISH();
4912 IEM_MC_END();
4913 break;
4914
4915 case IEMMODE_64BIT:
4916 IEM_MC_BEGIN(0, 1);
4917 IEM_MC_LOCAL(uint64_t, u64Value);
4918 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4919 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4920 IEM_MC_ADVANCE_RIP_AND_FINISH();
4921 IEM_MC_END();
4922 break;
4923
4924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4925 }
4926 }
4927 else
4928 {
4929 /*
4930 * We're loading a register from memory.
4931 */
4932 switch (pVCpu->iem.s.enmEffOpSize)
4933 {
4934 case IEMMODE_16BIT:
4935 IEM_MC_BEGIN(0, 2);
4936 IEM_MC_LOCAL(uint16_t, u16Value);
4937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4940 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4941 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4942 IEM_MC_ADVANCE_RIP_AND_FINISH();
4943 IEM_MC_END();
4944 break;
4945
4946 case IEMMODE_32BIT:
4947 IEM_MC_BEGIN(0, 2);
4948 IEM_MC_LOCAL(uint32_t, u32Value);
4949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4952 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4953 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4954 IEM_MC_ADVANCE_RIP_AND_FINISH();
4955 IEM_MC_END();
4956 break;
4957
4958 case IEMMODE_64BIT:
4959 IEM_MC_BEGIN(0, 2);
4960 IEM_MC_LOCAL(uint64_t, u64Value);
4961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4964 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4965 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4966 IEM_MC_ADVANCE_RIP_AND_FINISH();
4967 IEM_MC_END();
4968 break;
4969
4970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4971 }
4972 }
4973}
4974
4975
4976/**
4977 * opcode 0x63
4978 * @todo Table fixme
4979 */
4980FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4981{
4982 if (!IEM_IS_64BIT_CODE(pVCpu))
4983 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4984 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4985 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4986 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4987}
4988
4989
4990/**
4991 * @opcode 0x8c
4992 */
4993FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4994{
4995 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4996
4997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4998
4999 /*
5000 * Check that the destination register exists. The REX.R prefix is ignored.
5001 */
5002 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5003 if ( iSegReg > X86_SREG_GS)
5004 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5005
5006 /*
5007 * If rm is denoting a register, no more instruction bytes.
5008 * In that case, the operand size is respected and the upper bits are
5009 * cleared (starting with some pentium).
5010 */
5011 if (IEM_IS_MODRM_REG_MODE(bRm))
5012 {
5013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5014 switch (pVCpu->iem.s.enmEffOpSize)
5015 {
5016 case IEMMODE_16BIT:
5017 IEM_MC_BEGIN(0, 1);
5018 IEM_MC_LOCAL(uint16_t, u16Value);
5019 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5020 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5021 IEM_MC_ADVANCE_RIP_AND_FINISH();
5022 IEM_MC_END();
5023 break;
5024
5025 case IEMMODE_32BIT:
5026 IEM_MC_BEGIN(0, 1);
5027 IEM_MC_LOCAL(uint32_t, u32Value);
5028 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5029 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5030 IEM_MC_ADVANCE_RIP_AND_FINISH();
5031 IEM_MC_END();
5032 break;
5033
5034 case IEMMODE_64BIT:
5035 IEM_MC_BEGIN(0, 1);
5036 IEM_MC_LOCAL(uint64_t, u64Value);
5037 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5038 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5039 IEM_MC_ADVANCE_RIP_AND_FINISH();
5040 IEM_MC_END();
5041 break;
5042
5043 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5044 }
5045 }
5046 else
5047 {
5048 /*
5049 * We're saving the register to memory. The access is word sized
5050 * regardless of operand size prefixes.
5051 */
5052#if 0 /* not necessary */
5053 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5054#endif
5055 IEM_MC_BEGIN(0, 2);
5056 IEM_MC_LOCAL(uint16_t, u16Value);
5057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5060 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5061 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5062 IEM_MC_ADVANCE_RIP_AND_FINISH();
5063 IEM_MC_END();
5064 }
5065}
5066
5067
5068
5069
5070/**
5071 * @opcode 0x8d
5072 */
5073FNIEMOP_DEF(iemOp_lea_Gv_M)
5074{
5075 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5077 if (IEM_IS_MODRM_REG_MODE(bRm))
5078 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5079
5080 switch (pVCpu->iem.s.enmEffOpSize)
5081 {
5082 case IEMMODE_16BIT:
5083 IEM_MC_BEGIN(0, 2);
5084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5085 IEM_MC_LOCAL(uint16_t, u16Cast);
5086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5089 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5090 IEM_MC_ADVANCE_RIP_AND_FINISH();
5091 IEM_MC_END();
5092 break;
5093
5094 case IEMMODE_32BIT:
5095 IEM_MC_BEGIN(0, 2);
5096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5097 IEM_MC_LOCAL(uint32_t, u32Cast);
5098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5100 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5101 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5102 IEM_MC_ADVANCE_RIP_AND_FINISH();
5103 IEM_MC_END();
5104 break;
5105
5106 case IEMMODE_64BIT:
5107 IEM_MC_BEGIN(0, 1);
5108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5111 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5112 IEM_MC_ADVANCE_RIP_AND_FINISH();
5113 IEM_MC_END();
5114 break;
5115
5116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5117 }
5118}
5119
5120
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a 16-bit register or memory
 * operand.  CS is not a valid destination (\#UD), and the access is always
 * word sized regardless of operand-size prefixes.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Loading ES/SS/DS in 32-bit code gets the IEM_CIMPL_F_MODE flag;
           presumably because it can affect execution mode state — NOTE(review):
           confirm against the CIMPL implementation. */
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                 0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                 0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
}
5182
5183
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The high byte of the third argument is the pop size; presumably
               this makes the EA calc see rSP as if already incremented, per the
               Intel rule above — NOTE(review): confirm against the macro. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* iEffSeg must be fetched AFTER address decoding (segment prefix
               handling) - see r100623 (bugref:10369). */
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5321
5322
/**
 * @opcode 0x8f
 *
 * Dispatcher for the 0x8f opcode byte: modrm.reg == 0 is pop Ev, while /1
 * thru /7 form AMD's XOP escape prefix (decoded here when the CPU profile
 * has XOP; otherwise \#UD).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP may not be combined with legacy size/rep/lock/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R/X/B are stored inverted in the prefix bytes, hence the ~. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
5385
5386
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the given general register (REX.B is merged in here) with
 * rAX/eAX/AX according to the effective operand size.  No flags are
 * modified and no lock prefix is allowed (register-only form).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5436
5437
5438/**
5439 * @opcode 0x90
5440 */
5441FNIEMOP_DEF(iemOp_nop)
5442{
5443 /* R8/R8D and RAX/EAX can be exchanged. */
5444 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5445 {
5446 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5447 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5448 }
5449
5450 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5451 {
5452 IEMOP_MNEMONIC(pause, "pause");
5453 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
5454 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
5455 if (!IEM_IS_IN_GUEST(pVCpu))
5456 { /* probable */ }
5457#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5458 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
5459 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
5460#endif
5461#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5462 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
5463 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
5464#endif
5465 }
5466 else
5467 IEMOP_MNEMONIC(nop, "nop");
5468 IEM_MC_BEGIN(0, 0);
5469 IEM_MC_ADVANCE_RIP_AND_FINISH();
5470 IEM_MC_END();
5471}
5472
5473
/**
 * @opcode 0x91
 *
 * xchg rCX,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
5482
5483
/**
 * @opcode 0x92
 *
 * xchg rDX,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
5492
5493
/**
 * @opcode 0x93
 *
 * xchg rBX,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
5502
5503
5504/**
5505 * @opcode 0x94
5506 */
5507FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5508{
5509 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5510 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5511}
5512
5513
/**
 * @opcode 0x95
 *
 * xchg rBP,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
5522
5523
/**
 * @opcode 0x96
 *
 * xchg rSI,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
5532
5533
/**
 * @opcode 0x97
 *
 * xchg rDI,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
5542
5543
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign-extend the lower half of rAX into the full
 * (effective-operand-sized) register, implemented by testing the sign bit
 * and OR-ing in ones / AND-ing in zeros for the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {   /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {  /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {  /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5591
5592
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign-extend rAX into rDX: rDX becomes all ones if the
 * sign bit of the operand-sized rAX is set, otherwise zero.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5640
5641
/**
 * @opcode 0x9a
 *
 * call Ap - direct far call with an immediate seg:offset pointer.  Invalid
 * in 64-bit mode; the offset is 16 or 32 bits depending on operand size.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
5661
5662
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending FPU exceptions / device-not-available conditions and
 * otherwise does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5675
5676
/**
 * @opcode 0x9c
 *
 * pushf - push the flags register; deferred to the C implementation since
 * it may trigger a VM exit.  Default 64-bit operand size applies.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
5687
5688
/**
 * @opcode 0x9d
 *
 * popf - pop into the flags register; deferred to the C implementation
 * (modifies RFLAGS and may trigger a VM exit).  Default 64-bit operand
 * size applies.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
5699
5700
/**
 * @opcode 0x9e
 *
 * sahf - store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF plus the
 * always-set bit 1).  In 64-bit mode it requires the LAHF/SAHF CPUID
 * feature, else \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* AH is encoded as SP with the high-byte register convention */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper flag bits, replace the low byte */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5724
5725
/**
 * @opcode 0x9f
 *
 * lahf - load AH from the low byte of EFLAGS.  In 64-bit mode it requires
 * the LAHF/SAHF CPUID feature, else \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* AH via the high-byte register encoding */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5743
5744
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  Will return on failures.
 *
 * The immediate offset width follows the effective address mode (16/32/64
 * bits), zero-extended to 64 bits; a lock prefix raises \#UD via the
 * DONE_DECODING helper.
 *
 * @param   a_GCPtrMemOff   The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
5769
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - load AL from a direct memory offset (moffs) in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5792
5793
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - load rAX/eAX/AX from a direct memory offset (moffs) in the
 * effective segment, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5841
5842
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - store AL to a direct memory offset (moffs) in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5865
5866
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - store rAX/eAX/AX to a direct memory offset (moffs) in the
 * effective segment, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5914
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated movs step: load ValBits from [iEffSeg:rSI], store
 * to [ES:rDI], then advance or retreat rSI/rDI by the element size
 * according to EFLAGS.DF.  Addresses are AddrBits wide, zero-extended. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
5933
/**
 * @opcode 0xa4
 *
 * movsb - byte string move.  With a REP/REPNE prefix the whole repeated
 * operation is deferred to the C implementation; otherwise a single step
 * is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5968
5969
5970/**
5971 * @opcode 0xa5
5972 */
5973FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5974{
5975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5976
5977 /*
5978 * Use the C implementation if a repeat prefix is encountered.
5979 */
5980 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5981 {
5982 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5983 switch (pVCpu->iem.s.enmEffOpSize)
5984 {
5985 case IEMMODE_16BIT:
5986 switch (pVCpu->iem.s.enmEffAddrMode)
5987 {
5988 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5989 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5990 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5992 }
5993 break;
5994 case IEMMODE_32BIT:
5995 switch (pVCpu->iem.s.enmEffAddrMode)
5996 {
5997 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5998 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5999 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6001 }
6002 case IEMMODE_64BIT:
6003 switch (pVCpu->iem.s.enmEffAddrMode)
6004 {
6005 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6006 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6007 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6008 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6009 }
6010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6011 }
6012 }
6013 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6014
6015 /*
6016 * Annoying double switch here.
6017 * Using ugly macro for implementing the cases, sharing it with movsb.
6018 */
6019 switch (pVCpu->iem.s.enmEffOpSize)
6020 {
6021 case IEMMODE_16BIT:
6022 switch (pVCpu->iem.s.enmEffAddrMode)
6023 {
6024 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
6025 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
6026 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
6027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6028 }
6029 break;
6030
6031 case IEMMODE_32BIT:
6032 switch (pVCpu->iem.s.enmEffAddrMode)
6033 {
6034 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
6035 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
6036 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
6037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6038 }
6039 break;
6040
6041 case IEMMODE_64BIT:
6042 switch (pVCpu->iem.s.enmEffAddrMode)
6043 {
6044 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6045 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6046 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6048 }
6049 break;
6050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6051 }
6052}
6053
6054#undef IEM_MOVS_CASE
6055
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated cmps step: load ValBits from [iEffSeg:rSI] and
 * [ES:rDI], compare them via the cmp assembly helper (updating EFLAGS),
 * then advance or retreat rSI/rDI by the element size per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6082
/**
 * @opcode 0xa6
 *
 * cmpsb - byte string compare.  REPZ and REPNZ select different C
 * implementations (loop-while-equal vs loop-while-not-equal); without a
 * repeat prefix a single step is emitted via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6128
6129
6130/**
6131 * @opcode 0xa7
6132 */
6133FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6134{
6135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6136
6137 /*
6138 * Use the C implementation if a repeat prefix is encountered.
6139 */
6140 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6141 {
6142 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6143 switch (pVCpu->iem.s.enmEffOpSize)
6144 {
6145 case IEMMODE_16BIT:
6146 switch (pVCpu->iem.s.enmEffAddrMode)
6147 {
6148 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6149 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6150 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6152 }
6153 break;
6154 case IEMMODE_32BIT:
6155 switch (pVCpu->iem.s.enmEffAddrMode)
6156 {
6157 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6158 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6159 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6161 }
6162 case IEMMODE_64BIT:
6163 switch (pVCpu->iem.s.enmEffAddrMode)
6164 {
6165 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6166 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6167 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6169 }
6170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6171 }
6172 }
6173
6174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6175 {
6176 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6177 switch (pVCpu->iem.s.enmEffOpSize)
6178 {
6179 case IEMMODE_16BIT:
6180 switch (pVCpu->iem.s.enmEffAddrMode)
6181 {
6182 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6183 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6184 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6185 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6186 }
6187 break;
6188 case IEMMODE_32BIT:
6189 switch (pVCpu->iem.s.enmEffAddrMode)
6190 {
6191 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6192 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6193 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6195 }
6196 case IEMMODE_64BIT:
6197 switch (pVCpu->iem.s.enmEffAddrMode)
6198 {
6199 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6200 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6201 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6203 }
6204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6205 }
6206 }
6207
6208 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6209
6210 /*
6211 * Annoying double switch here.
6212 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6213 */
6214 switch (pVCpu->iem.s.enmEffOpSize)
6215 {
6216 case IEMMODE_16BIT:
6217 switch (pVCpu->iem.s.enmEffAddrMode)
6218 {
6219 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6220 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6221 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6223 }
6224 break;
6225
6226 case IEMMODE_32BIT:
6227 switch (pVCpu->iem.s.enmEffAddrMode)
6228 {
6229 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6230 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6231 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6233 }
6234 break;
6235
6236 case IEMMODE_64BIT:
6237 switch (pVCpu->iem.s.enmEffAddrMode)
6238 {
6239 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6240 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6241 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6243 }
6244 break;
6245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6246 }
6247}
6248
6249#undef IEM_CMPS_CASE
6250
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - non-destructive AND of AL with an immediate byte; only the
 * status flags are updated (AF is architecturally undefined, see below).
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
6260
6261
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - non-destructive AND of AX/EAX/RAX with an immediate; only
 * the status flags are updated (AF is architecturally undefined, see below).
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
6271
6272
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for a single (non-REP) STOS iteration: stores the low
 * ValBits of xAX to ES:[xDI] (the ES destination of STOS is not segment
 * overridable), then steps xDI by ValBits/8 - downwards when EFLAGS.DF is
 * set, upwards otherwise.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6288
/**
 * @opcode 0xaa
 *
 * STOSB - stores AL to ES:[e/rDI] (not segment overridable).
 *
 * REP forms (F2 and F3 behave identically for STOS) are deferred to the C
 * implementation (iemCImpl_stos_al_mNN); the plain form shares the
 * IEM_STOS_CASE microcode macro with stos[wdq].
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6323
6324
6325/**
6326 * @opcode 0xab
6327 */
6328FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6329{
6330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6331
6332 /*
6333 * Use the C implementation if a repeat prefix is encountered.
6334 */
6335 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6336 {
6337 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6338 switch (pVCpu->iem.s.enmEffOpSize)
6339 {
6340 case IEMMODE_16BIT:
6341 switch (pVCpu->iem.s.enmEffAddrMode)
6342 {
6343 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6344 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6345 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6347 }
6348 break;
6349 case IEMMODE_32BIT:
6350 switch (pVCpu->iem.s.enmEffAddrMode)
6351 {
6352 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6353 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6354 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6356 }
6357 case IEMMODE_64BIT:
6358 switch (pVCpu->iem.s.enmEffAddrMode)
6359 {
6360 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6361 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6362 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6364 }
6365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6366 }
6367 }
6368 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6369
6370 /*
6371 * Annoying double switch here.
6372 * Using ugly macro for implementing the cases, sharing it with stosb.
6373 */
6374 switch (pVCpu->iem.s.enmEffOpSize)
6375 {
6376 case IEMMODE_16BIT:
6377 switch (pVCpu->iem.s.enmEffAddrMode)
6378 {
6379 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6380 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6381 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6383 }
6384 break;
6385
6386 case IEMMODE_32BIT:
6387 switch (pVCpu->iem.s.enmEffAddrMode)
6388 {
6389 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6390 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6391 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6393 }
6394 break;
6395
6396 case IEMMODE_64BIT:
6397 switch (pVCpu->iem.s.enmEffAddrMode)
6398 {
6399 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6400 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6401 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6403 }
6404 break;
6405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6406 }
6407}
6408
6409#undef IEM_STOS_CASE
6410
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for a single (non-REP) LODS iteration: loads ValBits
 * bits from iEffSeg:[xSI] (the source segment is overridable) into the low
 * part of xAX, then steps xSI by ValBits/8 - downwards when EFLAGS.DF is
 * set, upwards otherwise.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6426
/**
 * @opcode 0xac
 *
 * LODSB - loads AL from iEffSeg:[e/rSI] (source segment overridable, hence
 * the iEffSeg argument to the C implementation).
 *
 * REP forms are deferred to the C implementation (iemCImpl_lods_al_mNN);
 * the plain form shares the IEM_LODS_CASE microcode macro with lods[wdq].
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6461
6462
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - loads AX/EAX/RAX from iEffSeg:[e/rSI] (source segment
 * overridable, hence the iEffSeg argument to the C implementation).
 *
 * REP forms are deferred to the C implementation (iemCImpl_lods_*_mNN); the
 * plain form shares the IEM_LODS_CASE microcode macro with lodsb.
 *
 * NOTE(review): the 32-bit op-size case in the rep switch has no break after
 * its inner switch; this is unreachable (every inner case returns) but is an
 * unannotated implicit fallthrough into the 64-bit case labels.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6546
6547#undef IEM_LODS_CASE
6548
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for a single (non-REP) SCAS iteration: fetches
 * ValBits bits from ES:[xDI] (the ES segment of SCAS is not overridable),
 * runs the CMP assembly helper against the low ValBits of xAX to update the
 * status flags, then steps xDI by ValBits/8 - downwards when EFLAGS.DF is
 * set, upwards otherwise.  xAX itself is not modified (CMP only reads it).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6570
6571/**
6572 * @opcode 0xae
6573 */
6574FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6575{
6576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6577
6578 /*
6579 * Use the C implementation if a repeat prefix is encountered.
6580 */
6581 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6582 {
6583 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6584 switch (pVCpu->iem.s.enmEffAddrMode)
6585 {
6586 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
6587 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
6588 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
6589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6590 }
6591 }
6592 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6593 {
6594 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6595 switch (pVCpu->iem.s.enmEffAddrMode)
6596 {
6597 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
6598 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
6599 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
6600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6601 }
6602 }
6603 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6604
6605 /*
6606 * Sharing case implementation with stos[wdq] below.
6607 */
6608 switch (pVCpu->iem.s.enmEffAddrMode)
6609 {
6610 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6611 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6612 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6614 }
6615}
6616
6617
/**
 * @opcode 0xaf
 *
 * SCASW/SCASD/SCASQ - compares AX/EAX/RAX with the operand at ES:[e/rDI]
 * (not segment overridable), setting status flags only.
 *
 * REPE/REPNE forms are deferred to the C implementations
 * (iemCImpl_rep[n]e_scas_*_mNN); the plain form shares the IEM_SCAS_CASE
 * microcode macro with scasb.
 *
 * NOTE(review): the 32-bit op-size cases in both rep switches have no break
 * after their inner switch; this is unreachable (every inner case returns)
 * but is an unannotated implicit fallthrough into the 64-bit case labels.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 32-bit addressing is encodable in 64-bit mode (0x67 prefix), 16-bit is not - verify. */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6734
6735#undef IEM_SCAS_CASE
6736
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and emits microcode storing it into the given
 * 8-bit register.  @a iFixedReg is the REX-adjusted register index; for
 * indices 4-7 without a REX prefix IEM_MC_STORE_GREG_U8 addresses the
 * high-byte registers (AH/CH/DH/BH).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6751
6752
/**
 * @opcode 0xb0
 *
 * 'mov AL,Ib' - with REX.B the destination is R8B instead.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
6761
6762
/**
 * @opcode 0xb1
 *
 * 'mov CL,Ib' - with REX.B the destination is R9B instead.
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
6771
6772
/**
 * @opcode 0xb2
 *
 * 'mov DL,Ib' - with REX.B the destination is R10B instead.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
6781
6782
/**
 * @opcode 0xb3
 *
 * 'mov BL,Ib' - with REX.B the destination is R11B instead.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
6791
6792
/**
 * @opcode 0xb4
 *
 * 'mov AH,Ib' - register index 4 (hence X86_GREG_xSP); without a REX prefix
 * the 8-bit store addresses AH, with REX it is SPL/R12B.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
6801
6802
/**
 * @opcode 0xb5
 *
 * 'mov CH,Ib' - register index 5 (hence X86_GREG_xBP); without a REX prefix
 * the 8-bit store addresses CH, with REX it is BPL/R13B.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
6811
6812
/**
 * @opcode 0xb6
 *
 * 'mov DH,Ib' - register index 6 (hence X86_GREG_xSI); without a REX prefix
 * the 8-bit store addresses DH, with REX it is SIL/R14B.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
6821
6822
/**
 * @opcode 0xb7
 *
 * 'mov BH,Ib' - register index 7 (hence X86_GREG_xDI); without a REX prefix
 * the 8-bit store addresses BH, with REX it is DIL/R15B.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6831
6832
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate sized by the effective operand size (including the
 * full 64-bit immediate in 64-bit mode - the only instruction with one) and
 * emits microcode storing it to the given REX-adjusted register index.  The
 * 32-bit store zero-extends into the upper half per the AMD64 rules.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6880
6881
/**
 * @opcode 0xb8
 *
 * 'mov rAX,Iv' - with REX.B the destination is r8 instead.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
6890
6891
/**
 * @opcode 0xb9
 *
 * 'mov rCX,Iv' - with REX.B the destination is r9 instead.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
6900
6901
/**
 * @opcode 0xba
 *
 * 'mov rDX,Iv' - with REX.B the destination is r10 instead.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
6910
6911
/**
 * @opcode 0xbb
 *
 * 'mov rBX,Iv' - with REX.B the destination is r11 instead.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
6920
6921
/**
 * @opcode 0xbc
 *
 * 'mov rSP,Iv' - with REX.B the destination is r12 instead.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
6930
6931
/**
 * @opcode 0xbd
 *
 * 'mov rBP,Iv' - with REX.B the destination is r13 instead.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
6940
6941
/**
 * @opcode 0xbe
 *
 * 'mov rSI,Iv' - with REX.B the destination is r14 instead.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
6950
6951
/**
 * @opcode 0xbf
 *
 * 'mov rDI,Iv' - with REX.B the destination is r15 instead.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6960
6961
/**
 * @opcode 0xc0
 *
 * Group 2 byte shifts/rotates with an immediate count:
 * ROL/ROR/RCL/RCR/SHL/SHR/SAR Eb,Ib (/6 is an invalid encoding).
 * Requires a 186 or later (the Ib form did not exist on the 8086).
 *
 * The ModR/M reg field selects the operation; the assembly helper is picked
 * per host/target EFLAGS behavior via IEMTARGETCPU_EFL_BEHAVIOR_SELECT.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory - the immediate count byte follows the ModR/M bytes, hence
           the '1' (cbImmAndRspOffset) passed to the effective address calc. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7022
7023
7024/**
7025 * @opcode 0xc1
7026 */
7027FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7028{
7029 IEMOP_HLP_MIN_186();
7030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7031 PCIEMOPSHIFTSIZES pImpl;
7032 switch (IEM_GET_MODRM_REG_8(bRm))
7033 {
7034 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7035 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7036 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7037 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7038 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7039 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7040 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7041 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7042 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7043 }
7044 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7045
7046 if (IEM_IS_MODRM_REG_MODE(bRm))
7047 {
7048 /* register */
7049 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7051 switch (pVCpu->iem.s.enmEffOpSize)
7052 {
7053 case IEMMODE_16BIT:
7054 IEM_MC_BEGIN(3, 0);
7055 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7056 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7057 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7058 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7059 IEM_MC_REF_EFLAGS(pEFlags);
7060 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7061 IEM_MC_ADVANCE_RIP_AND_FINISH();
7062 IEM_MC_END();
7063 break;
7064
7065 case IEMMODE_32BIT:
7066 IEM_MC_BEGIN(3, 0);
7067 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7068 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7069 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7070 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7071 IEM_MC_REF_EFLAGS(pEFlags);
7072 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7074 IEM_MC_ADVANCE_RIP_AND_FINISH();
7075 IEM_MC_END();
7076 break;
7077
7078 case IEMMODE_64BIT:
7079 IEM_MC_BEGIN(3, 0);
7080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7081 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7082 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7083 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7084 IEM_MC_REF_EFLAGS(pEFlags);
7085 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7086 IEM_MC_ADVANCE_RIP_AND_FINISH();
7087 IEM_MC_END();
7088 break;
7089
7090 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7091 }
7092 }
7093 else
7094 {
7095 /* memory */
7096 switch (pVCpu->iem.s.enmEffOpSize)
7097 {
7098 case IEMMODE_16BIT:
7099 IEM_MC_BEGIN(3, 2);
7100 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7101 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7102 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7104
7105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7106 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7107 IEM_MC_ASSIGN(cShiftArg, cShift);
7108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7109 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7110 IEM_MC_FETCH_EFLAGS(EFlags);
7111 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7112
7113 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7114 IEM_MC_COMMIT_EFLAGS(EFlags);
7115 IEM_MC_ADVANCE_RIP_AND_FINISH();
7116 IEM_MC_END();
7117 break;
7118
7119 case IEMMODE_32BIT:
7120 IEM_MC_BEGIN(3, 2);
7121 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7122 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7123 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7125
7126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7127 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7128 IEM_MC_ASSIGN(cShiftArg, cShift);
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7131 IEM_MC_FETCH_EFLAGS(EFlags);
7132 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7133
7134 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7135 IEM_MC_COMMIT_EFLAGS(EFlags);
7136 IEM_MC_ADVANCE_RIP_AND_FINISH();
7137 IEM_MC_END();
7138 break;
7139
7140 case IEMMODE_64BIT:
7141 IEM_MC_BEGIN(3, 2);
7142 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7143 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7144 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7146
7147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7148 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7149 IEM_MC_ASSIGN(cShiftArg, cShift);
7150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7151 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7152 IEM_MC_FETCH_EFLAGS(EFlags);
7153 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7154
7155 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7156 IEM_MC_COMMIT_EFLAGS(EFlags);
7157 IEM_MC_ADVANCE_RIP_AND_FINISH();
7158 IEM_MC_END();
7159 break;
7160
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 }
7164}
7165
7166
7167/**
 * Near return, popping Iw extra bytes of stack arguments after the return
 * address.  Defers to the size-specific iemCImpl_retn_iw_* worker selected
 * by the effective operand size (the 64-bit default-size helper above makes
 * 64-bit the default in long mode, with Intel ignoring a 66h prefix).
 *
7168 * @opcode 0xc2
7169 */
7170FNIEMOP_DEF(iemOp_retn_Iw)
7171{
7172 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7173 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7174 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7176 switch (pVCpu->iem.s.enmEffOpSize)
7177 {
7178 case IEMMODE_16BIT:
7179 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_16, u16Imm);
7180 case IEMMODE_32BIT:
7181 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_32, u16Imm);
7182 case IEMMODE_64BIT:
7183 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_64, u16Imm);
7184 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7185 }
7186}
7187
7188
7189/**
 * Plain near return (no argument bytes to pop).  Same structure as the
 * 0xc2 retn Iw decoder above, deferring to the size-specific
 * iemCImpl_retn_* worker.
 *
7190 * @opcode 0xc3
7191 */
7192FNIEMOP_DEF(iemOp_retn)
7193{
7194 IEMOP_MNEMONIC(retn, "retn");
7195 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7197 switch (pVCpu->iem.s.enmEffOpSize)
7198 {
7199 case IEMMODE_16BIT:
7200 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_16);
7201 case IEMMODE_32BIT:
7202 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_32);
7203 case IEMMODE_64BIT:
7204 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_64);
7205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7206 }
7207}
7208
7209
7210/**
 * LES Gv,Mp -- or, in 64-bit mode / with MOD=3, the 3-byte VEX prefix.
 * After parsing the two VEX payload bytes this dispatches the escaped
 * opcode into VEX map 1 (0f), 2 (0f 38) or 3 (0f 3a).
 *
7211 * @opcode 0xc4
7212 */
7213FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7214{
7215 /* The LES instruction is invalid 64-bit mode. In legacy and
7216 compatibility mode it is invalid with MOD=3.
7217 The use as a VEX prefix is made possible by assigning the inverted
7218 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7219 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7221 if ( IEM_IS_64BIT_CODE(pVCpu)
7222 || IEM_IS_MODRM_REG_MODE(bRm) )
7223 {
7224 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7225 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7226 {
7227 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7228 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7229 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7230 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7231 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
               /* VEX.W (bit 7 of the 2nd payload byte) acts as REX.W, 64-bit mode only. */
7232 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
7233 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
               /* bRm here is the 1st VEX payload byte: ~R, ~X, ~B and the 5-bit map select. */
7234 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7235 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
7236 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
7237 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7238 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7239 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
7240
7241 switch (bRm & 0x1f)
7242 {
7243 case 1: /* 0x0f lead opcode byte. */
7244#ifdef IEM_WITH_VEX
7245 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7246#else
7247 IEMOP_BITCH_ABOUT_STUB();
7248 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7249#endif
7250
7251 case 2: /* 0x0f 0x38 lead opcode bytes. */
7252#ifdef IEM_WITH_VEX
7253 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7254#else
7255 IEMOP_BITCH_ABOUT_STUB();
7256 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7257#endif
7258
7259 case 3: /* 0x0f 0x3a lead opcode bytes. */
7260#ifdef IEM_WITH_VEX
7261 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7262#else
7263 IEMOP_BITCH_ABOUT_STUB();
7264 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7265#endif
7266
7267 default:
7268 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7269 IEMOP_RAISE_INVALID_OPCODE_RET();
7270 }
7271 }
7272 Log(("VEX3: VEX support disabled!\n"));
7273 IEMOP_RAISE_INVALID_OPCODE_RET();
7274 }
7275
    /* Legacy/compatibility mode with MOD != 3: the real LES instruction. */
7276 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7277 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7278}
7279
7280
7281/**
 * LDS Gv,Mp -- or, in 64-bit mode / with MOD=3, the 2-byte VEX prefix.
 * The single VEX payload byte always dispatches into VEX map 1 (0f).
 *
7282 * @opcode 0xc5
7283 */
7284FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7285{
7286 /* The LDS instruction is invalid 64-bit mode. In legacy and
7287 compatibility mode it is invalid with MOD=3.
7288 The use as a VEX prefix is made possible by assigning the inverted
7289 REX.R to the top MOD bit, and the top bit in the inverted register
7290 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7291 to accessing registers 0..7 in this VEX form. */
7292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7293 if ( IEM_IS_64BIT_CODE(pVCpu)
7294 || IEM_IS_MODRM_REG_MODE(bRm))
7295 {
7296 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7297 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7298 {
7299 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7300 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7301 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7302 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
           /* bRm doubles as the VEX payload byte: ~R, ~vvvv, L and pp. */
7303 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7304 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7305 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7306 pVCpu->iem.s.idxPrefix = bRm & 0x3;
7307
7308#ifdef IEM_WITH_VEX
7309 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7310#else
7311 IEMOP_BITCH_ABOUT_STUB();
7312 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7313#endif
7314 }
7315
7316 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7317 Log(("VEX2: VEX support disabled!\n"));
7318 IEMOP_RAISE_INVALID_OPCODE_RET();
7319 }
7320
    /* Legacy/compatibility mode with MOD != 3: the real LDS instruction. */
7321 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7322 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7323}
7324
7325
7326/**
 * Group 11: mov Eb,Ib is the only valid encoding (/0); any other reg
 * field value raises \#UD.  In the memory form the Ib immediate trails
 * the addressing bytes and is fetched after effective address
 * calculation (hence the '1' passed to IEM_MC_CALC_RM_EFF_ADDR).
 *
7327 * @opcode 0xc6
7328 */
7329FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7330{
7331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7332 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7333 IEMOP_RAISE_INVALID_OPCODE_RET();
7334 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
7335
7336 if (IEM_IS_MODRM_REG_MODE(bRm))
7337 {
7338 /* register access */
7339 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7341 IEM_MC_BEGIN(0, 0);
7342 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
7343 IEM_MC_ADVANCE_RIP_AND_FINISH();
7344 IEM_MC_END();
7345 }
7346 else
7347 {
7348 /* memory access. */
7349 IEM_MC_BEGIN(0, 1);
7350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7352 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7354 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
7355 IEM_MC_ADVANCE_RIP_AND_FINISH();
7356 IEM_MC_END();
7357 }
7358}
7359
7360
7361/**
 * Group 11: mov Ev,Iz is the only valid encoding (/0); any other reg
 * field value raises \#UD.  The 64-bit form uses a sign-extended 32-bit
 * immediate.  In the memory forms the immediate trails the addressing
 * bytes, so its size (2 or 4) is passed to IEM_MC_CALC_RM_EFF_ADDR and
 * the immediate is fetched after address calculation.
 *
7362 * @opcode 0xc7
7363 */
7364FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
7365{
7366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7367 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
7368 IEMOP_RAISE_INVALID_OPCODE_RET();
7369 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
7370
7371 if (IEM_IS_MODRM_REG_MODE(bRm))
7372 {
7373 /* register access */
7374 switch (pVCpu->iem.s.enmEffOpSize)
7375 {
7376 case IEMMODE_16BIT:
7377 IEM_MC_BEGIN(0, 0);
7378 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7380 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
7381 IEM_MC_ADVANCE_RIP_AND_FINISH();
7382 IEM_MC_END();
7383 break;
7384
7385 case IEMMODE_32BIT:
7386 IEM_MC_BEGIN(0, 0);
7387 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7389 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
7390 IEM_MC_ADVANCE_RIP_AND_FINISH();
7391 IEM_MC_END();
7392 break;
7393
7394 case IEMMODE_64BIT:
7395 IEM_MC_BEGIN(0, 0);
7396 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7398 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
7399 IEM_MC_ADVANCE_RIP_AND_FINISH();
7400 IEM_MC_END();
7401 break;
7402
7403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7404 }
7405 }
7406 else
7407 {
7408 /* memory access. */
7409 switch (pVCpu->iem.s.enmEffOpSize)
7410 {
7411 case IEMMODE_16BIT:
7412 IEM_MC_BEGIN(0, 1);
7413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
7415 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7417 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
7418 IEM_MC_ADVANCE_RIP_AND_FINISH();
7419 IEM_MC_END();
7420 break;
7421
7422 case IEMMODE_32BIT:
7423 IEM_MC_BEGIN(0, 1);
7424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7426 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7428 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
7429 IEM_MC_ADVANCE_RIP_AND_FINISH();
7430 IEM_MC_END();
7431 break;
7432
7433 case IEMMODE_64BIT:
7434 IEM_MC_BEGIN(0, 1);
7435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7437 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7439 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
7440 IEM_MC_ADVANCE_RIP_AND_FINISH();
7441 IEM_MC_END();
7442 break;
7443
7444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7445 }
7446 }
7447}
7448
7449
7450
7451
7452/**
 * ENTER Iw,Ib (80186+): Iw is the frame size, Ib the nesting level.
 * Defers to iemCImpl_enter with the effective operand size.
 *
7453 * @opcode 0xc8
7454 */
7455FNIEMOP_DEF(iemOp_enter_Iw_Ib)
7456{
7457 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
7458 IEMOP_HLP_MIN_186();
7459 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7460 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
7461 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
7462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7463 IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
7464}
7465
7466
7467/**
 * LEAVE (80186+): tears down the stack frame set up by ENTER.
 * Defers to iemCImpl_leave with the effective operand size.
 *
7468 * @opcode 0xc9
7469 */
7470FNIEMOP_DEF(iemOp_leave)
7471{
7472 IEMOP_MNEMONIC(leave, "leave");
7473 IEMOP_HLP_MIN_186();
7474 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7476 IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
7477}
7478
7479
7480/**
 * Far return popping Iw bytes of arguments.  Defers to iemCImpl_retf;
 * flagged IEM_CIMPL_F_MODE since a far return can switch CPU mode.
 *
7481 * @opcode 0xca
7482 */
7483FNIEMOP_DEF(iemOp_retf_Iw)
7484{
7485 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
7486 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7488 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
7489}
7490
7491
7492/**
 * Plain far return: same worker as 0xca retf Iw, but with zero bytes of
 * arguments to pop.
 *
7493 * @opcode 0xcb
7494 */
7495FNIEMOP_DEF(iemOp_retf)
7496{
7497 IEMOP_MNEMONIC(retf, "retf");
7498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7499 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
7500}
7501
7502
7503/**
7504 * @opcode 0xcc
7505 */
7506FNIEMOP_DEF(iemOp_int3)
7507{
7508 IEMOP_MNEMONIC(int3, "int3");
7509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7510 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
7511 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
7512}
7513
7514
7515/**
7516 * @opcode 0xcd
7517 */
7518FNIEMOP_DEF(iemOp_int_Ib)
7519{
7520 IEMOP_MNEMONIC(int_Ib, "int Ib");
7521 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
7522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7523 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
7524 iemCImpl_int, u8Int, IEMINT_INTN);
7525}
7526
7527
7528/**
 * INTO: raises \#OF if the overflow flag is set (decided inside the
 * iemCImpl_int worker, tagged IEMINT_INTO).  Invalid in 64-bit mode,
 * hence IEMOP_HLP_NO_64BIT.
 *
7529 * @opcode 0xce
7530 */
7531FNIEMOP_DEF(iemOp_into)
7532{
7533 IEMOP_MNEMONIC(into, "into");
7534 IEMOP_HLP_NO_64BIT();
7535 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
7536 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
7537}
7538
7539
7540/**
 * IRET: interrupt return, deferring to iemCImpl_iret with the effective
 * operand size.  Can change mode, RFLAGS and cause VM exits.
 *
7541 * @opcode 0xcf
7542 */
7543FNIEMOP_DEF(iemOp_iret)
7544{
7545 IEMOP_MNEMONIC(iret, "iret");
7546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7547 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
7548 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
7549}
7550
7551
7552/**
 * Group 2: rotate/shift Eb by a constant count of 1.  The ModR/M reg
 * field selects the operation; /6 raises \#UD.  OF and AF are left
 * undefined by these operations.
 *
7553 * @opcode 0xd0
7554 */
7555FNIEMOP_DEF(iemOp_Grp2_Eb_1)
7556{
7557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7558 PCIEMOPSHIFTSIZES pImpl;
7559 switch (IEM_GET_MODRM_REG_8(bRm))
7560 {
7561 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
7562 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
7563 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
7564 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
7565 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
7566 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
7567 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
7568 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7569 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7570 }
7571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7572
7573 if (IEM_IS_MODRM_REG_MODE(bRm))
7574 {
7575 /* register */
7576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7577 IEM_MC_BEGIN(3, 0);
7578 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7579 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7580 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7581 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7582 IEM_MC_REF_EFLAGS(pEFlags);
7583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7584 IEM_MC_ADVANCE_RIP_AND_FINISH();
7585 IEM_MC_END();
7586 }
7587 else
7588 {
7589 /* memory */
        /* No trailing immediate, so 0 extra opcode bytes after the addressing bytes. */
7590 IEM_MC_BEGIN(3, 2);
7591 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7592 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7593 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7595
7596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7598 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7599 IEM_MC_FETCH_EFLAGS(EFlags);
7600 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7601
7602 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7603 IEM_MC_COMMIT_EFLAGS(EFlags);
7604 IEM_MC_ADVANCE_RIP_AND_FINISH();
7605 IEM_MC_END();
7606 }
7607}
7608
7609
7610
7611/**
 * Group 2: rotate/shift Ev by a constant count of 1.  The ModR/M reg
 * field selects the operation; /6 raises \#UD.  OF and AF are left
 * undefined by these operations.
 *
7612 * @opcode 0xd1
7613 */
7614FNIEMOP_DEF(iemOp_Grp2_Ev_1)
7615{
7616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7617 PCIEMOPSHIFTSIZES pImpl;
7618 switch (IEM_GET_MODRM_REG_8(bRm))
7619 {
7620 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
7621 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
7622 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
7623 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
7624 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
7625 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
7626 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
7627 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7628 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7629 }
7630 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7631
7632 if (IEM_IS_MODRM_REG_MODE(bRm))
7633 {
7634 /* register */
7635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7636 switch (pVCpu->iem.s.enmEffOpSize)
7637 {
7638 case IEMMODE_16BIT:
7639 IEM_MC_BEGIN(3, 0);
7640 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7641 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7642 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7643 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7644 IEM_MC_REF_EFLAGS(pEFlags);
7645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7646 IEM_MC_ADVANCE_RIP_AND_FINISH();
7647 IEM_MC_END();
7648 break;
7649
7650 case IEMMODE_32BIT:
7651 IEM_MC_BEGIN(3, 0);
7652 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7653 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7654 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7655 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7656 IEM_MC_REF_EFLAGS(pEFlags);
7657 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7658 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7659 IEM_MC_ADVANCE_RIP_AND_FINISH();
7660 IEM_MC_END();
7661 break;
7662
7663 case IEMMODE_64BIT:
7664 IEM_MC_BEGIN(3, 0);
7665 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7666 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7667 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7668 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7669 IEM_MC_REF_EFLAGS(pEFlags);
7670 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7671 IEM_MC_ADVANCE_RIP_AND_FINISH();
7672 IEM_MC_END();
7673 break;
7674
7675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7676 }
7677 }
7678 else
7679 {
7680 /* memory */
        /* No trailing immediate, so 0 extra opcode bytes after the addressing bytes. */
7681 switch (pVCpu->iem.s.enmEffOpSize)
7682 {
7683 case IEMMODE_16BIT:
7684 IEM_MC_BEGIN(3, 2);
7685 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7686 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7687 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7689
7690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7693 IEM_MC_FETCH_EFLAGS(EFlags);
7694 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7695
7696 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7697 IEM_MC_COMMIT_EFLAGS(EFlags);
7698 IEM_MC_ADVANCE_RIP_AND_FINISH();
7699 IEM_MC_END();
7700 break;
7701
7702 case IEMMODE_32BIT:
7703 IEM_MC_BEGIN(3, 2);
7704 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7705 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7706 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7708
7709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7711 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7712 IEM_MC_FETCH_EFLAGS(EFlags);
7713 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7714
7715 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7716 IEM_MC_COMMIT_EFLAGS(EFlags);
7717 IEM_MC_ADVANCE_RIP_AND_FINISH();
7718 IEM_MC_END();
7719 break;
7720
7721 case IEMMODE_64BIT:
7722 IEM_MC_BEGIN(3, 2);
7723 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7724 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7725 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7727
7728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7730 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7731 IEM_MC_FETCH_EFLAGS(EFlags);
7732 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7733
7734 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7735 IEM_MC_COMMIT_EFLAGS(EFlags);
7736 IEM_MC_ADVANCE_RIP_AND_FINISH();
7737 IEM_MC_END();
7738 break;
7739
7740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7741 }
7742 }
7743}
7744
7745
7746/**
 * Group 2: rotate/shift Eb by the count in CL.  The ModR/M reg field
 * selects the operation; /6 raises \#UD.  OF and AF are left undefined
 * by these operations.
 *
7747 * @opcode 0xd2
7748 */
7749FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
7750{
7751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7752 PCIEMOPSHIFTSIZES pImpl;
7753 switch (IEM_GET_MODRM_REG_8(bRm))
7754 {
7755 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
7756 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
7757 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
7758 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
7759 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
7760 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
7761 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
7762 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7763 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
7764 }
7765 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7766
7767 if (IEM_IS_MODRM_REG_MODE(bRm))
7768 {
7769 /* register */
7770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7771 IEM_MC_BEGIN(3, 0);
7772 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7773 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7774 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7775 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
       /* The count is read from CL (low byte of xCX). */
7776 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7777 IEM_MC_REF_EFLAGS(pEFlags);
7778 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7779 IEM_MC_ADVANCE_RIP_AND_FINISH();
7780 IEM_MC_END();
7781 }
7782 else
7783 {
7784 /* memory */
7785 IEM_MC_BEGIN(3, 2);
7786 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7787 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7788 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7790
7791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7793 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7794 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7795 IEM_MC_FETCH_EFLAGS(EFlags);
7796 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7797
7798 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7799 IEM_MC_COMMIT_EFLAGS(EFlags);
7800 IEM_MC_ADVANCE_RIP_AND_FINISH();
7801 IEM_MC_END();
7802 }
7803}
7804
7805
7806/**
 * Group 2: rotate/shift Ev by the count in CL.  The ModR/M reg field
 * selects the operation; /6 raises \#UD.  OF and AF are left undefined
 * by these operations.
 *
7807 * @opcode 0xd3
7808 */
7809FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
7810{
7811 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7812 PCIEMOPSHIFTSIZES pImpl;
7813 switch (IEM_GET_MODRM_REG_8(bRm))
7814 {
7815 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
7816 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
7817 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
7818 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
7819 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
7820 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
7821 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
7822 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7823 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7824 }
7825 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7826
7827 if (IEM_IS_MODRM_REG_MODE(bRm))
7828 {
7829 /* register */
7830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7831 switch (pVCpu->iem.s.enmEffOpSize)
7832 {
7833 case IEMMODE_16BIT:
7834 IEM_MC_BEGIN(3, 0);
7835 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7836 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7837 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7838 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7839 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7840 IEM_MC_REF_EFLAGS(pEFlags);
7841 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7842 IEM_MC_ADVANCE_RIP_AND_FINISH();
7843 IEM_MC_END();
7844 break;
7845
7846 case IEMMODE_32BIT:
7847 IEM_MC_BEGIN(3, 0);
7848 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7849 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7850 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7851 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7852 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7853 IEM_MC_REF_EFLAGS(pEFlags);
7854 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7855 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7856 IEM_MC_ADVANCE_RIP_AND_FINISH();
7857 IEM_MC_END();
7858 break;
7859
7860 case IEMMODE_64BIT:
7861 IEM_MC_BEGIN(3, 0);
7862 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7863 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7864 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7865 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7866 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7867 IEM_MC_REF_EFLAGS(pEFlags);
7868 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7869 IEM_MC_ADVANCE_RIP_AND_FINISH();
7870 IEM_MC_END();
7871 break;
7872
7873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7874 }
7875 }
7876 else
7877 {
7878 /* memory */
        /* The count comes from CL, so no immediate trails the addressing bytes. */
7879 switch (pVCpu->iem.s.enmEffOpSize)
7880 {
7881 case IEMMODE_16BIT:
7882 IEM_MC_BEGIN(3, 2);
7883 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7884 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7885 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7887
7888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7890 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7891 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7892 IEM_MC_FETCH_EFLAGS(EFlags);
7893 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7894
7895 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7896 IEM_MC_COMMIT_EFLAGS(EFlags);
7897 IEM_MC_ADVANCE_RIP_AND_FINISH();
7898 IEM_MC_END();
7899 break;
7900
7901 case IEMMODE_32BIT:
7902 IEM_MC_BEGIN(3, 2);
7903 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7904 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7905 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7907
7908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7910 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7911 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7912 IEM_MC_FETCH_EFLAGS(EFlags);
7913 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7914
7915 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7916 IEM_MC_COMMIT_EFLAGS(EFlags);
7917 IEM_MC_ADVANCE_RIP_AND_FINISH();
7918 IEM_MC_END();
7919 break;
7920
7921 case IEMMODE_64BIT:
7922 IEM_MC_BEGIN(3, 2);
7923 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7924 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7925 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7927
7928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7930 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7931 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7932 IEM_MC_FETCH_EFLAGS(EFlags);
7933 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7934
7935 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7936 IEM_MC_COMMIT_EFLAGS(EFlags);
7937 IEM_MC_ADVANCE_RIP_AND_FINISH();
7938 IEM_MC_END();
7939 break;
7940
7941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7942 }
7943 }
7944}
7945
7946/**
7947 * @opcode 0xd4
7948 */
7949FNIEMOP_DEF(iemOp_aam_Ib)
7950{
7951 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
7952 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 IEMOP_HLP_NO_64BIT();
7955 if (!bImm)
7956 IEMOP_RAISE_DIVIDE_ERROR_RET();
7957 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
7958}
7959
7960
7961/**
7962 * @opcode 0xd5
7963 */
7964FNIEMOP_DEF(iemOp_aad_Ib)
7965{
7966 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
7967 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
7968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7969 IEMOP_HLP_NO_64BIT();
7970 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
7971}
7972
7973
7974/**
7975 * @opcode 0xd6
7976 */
7977FNIEMOP_DEF(iemOp_salc)
7978{
7979 IEMOP_MNEMONIC(salc, "salc");
7980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7981 IEMOP_HLP_NO_64BIT();
7982
7983 IEM_MC_BEGIN(0, 0);
7984 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7985 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
7986 } IEM_MC_ELSE() {
7987 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
7988 } IEM_MC_ENDIF();
7989 IEM_MC_ADVANCE_RIP_AND_FINISH();
7990 IEM_MC_END();
7991}
7992
7993
7994/**
7995 * @opcode 0xd7
7996 */
7997FNIEMOP_DEF(iemOp_xlat)
7998{
7999 IEMOP_MNEMONIC(xlat, "xlat");
8000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8001 switch (pVCpu->iem.s.enmEffAddrMode)
8002 {
8003 case IEMMODE_16BIT:
8004 IEM_MC_BEGIN(2, 0);
8005 IEM_MC_LOCAL(uint8_t, u8Tmp);
8006 IEM_MC_LOCAL(uint16_t, u16Addr);
8007 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
8008 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
8009 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
8010 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8011 IEM_MC_ADVANCE_RIP_AND_FINISH();
8012 IEM_MC_END();
8013 break;
8014
8015 case IEMMODE_32BIT:
8016 IEM_MC_BEGIN(2, 0);
8017 IEM_MC_LOCAL(uint8_t, u8Tmp);
8018 IEM_MC_LOCAL(uint32_t, u32Addr);
8019 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
8020 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
8021 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
8022 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8023 IEM_MC_ADVANCE_RIP_AND_FINISH();
8024 IEM_MC_END();
8025 break;
8026
8027 case IEMMODE_64BIT:
8028 IEM_MC_BEGIN(2, 0);
8029 IEM_MC_LOCAL(uint8_t, u8Tmp);
8030 IEM_MC_LOCAL(uint64_t, u64Addr);
8031 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
8032 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
8033 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
8034 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8035 IEM_MC_ADVANCE_RIP_AND_FINISH();
8036 IEM_MC_END();
8037 break;
8038
8039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8040 }
8041}
8042
8043
8044/**
8045 * Common worker for FPU instructions working on ST0 and STn, and storing the
8046 * result in ST0.
8047 *
8048 * @param bRm Mod R/M byte.
8049 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8050 */
8051FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8052{
8053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8054
8055 IEM_MC_BEGIN(3, 1);
8056 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8057 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8058 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8059 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8060
8061 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8062 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8063 IEM_MC_PREPARE_FPU_USAGE();
8064 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8065 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8066 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8067 } IEM_MC_ELSE() {
8068 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8069 } IEM_MC_ENDIF();
8070 IEM_MC_ADVANCE_RIP_AND_FINISH();
8071
8072 IEM_MC_END();
8073}
8074
8075
8076/**
8077 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8078 * flags.
8079 *
8080 * @param bRm Mod R/M byte.
8081 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8082 */
8083FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8084{
8085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8086
8087 IEM_MC_BEGIN(3, 1);
8088 IEM_MC_LOCAL(uint16_t, u16Fsw);
8089 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8090 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8091 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8092
8093 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8094 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8095 IEM_MC_PREPARE_FPU_USAGE();
8096 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8097 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8098 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8099 } IEM_MC_ELSE() {
8100 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8101 } IEM_MC_ENDIF();
8102 IEM_MC_ADVANCE_RIP_AND_FINISH();
8103
8104 IEM_MC_END();
8105}
8106
8107
8108/**
8109 * Common worker for FPU instructions working on ST0 and STn, only affecting
8110 * flags, and popping when done.
8111 *
8112 * @param bRm Mod R/M byte.
8113 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8114 */
8115FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8116{
8117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8118
8119 IEM_MC_BEGIN(3, 1);
8120 IEM_MC_LOCAL(uint16_t, u16Fsw);
8121 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8122 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8123 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8124
8125 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8126 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8127 IEM_MC_PREPARE_FPU_USAGE();
8128 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8129 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8130 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8131 } IEM_MC_ELSE() {
8132 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8133 } IEM_MC_ENDIF();
8134 IEM_MC_ADVANCE_RIP_AND_FINISH();
8135
8136 IEM_MC_END();
8137}
8138
8139
/** Opcode 0xd8 11/0 - fadd st0,stN: ST0 = ST0 + STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8146
8147
/** Opcode 0xd8 11/1 - fmul st0,stN: ST0 = ST0 * STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8154
8155
/** Opcode 0xd8 11/2 - fcom st0,stN: compare ST0 with STn, updating FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8162
8163
/** Opcode 0xd8 11/3 - fcomp st0,stN: compare ST0 with STn (same assembly
 *  worker as fcom), then pop the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8170
8171
/** Opcode 0xd8 11/4 - fsub st0,stN: ST0 = ST0 - STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8178
8179
/** Opcode 0xd8 11/5 - fsubr st0,stN: reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8186
8187
/** Opcode 0xd8 11/6 - fdiv st0,stN: ST0 = ST0 / STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8194
8195
/** Opcode 0xd8 11/7 - fdivr st0,stN: reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8202
8203
8204/**
8205 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8206 * the result in ST0.
8207 *
8208 * @param bRm Mod R/M byte.
8209 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8210 */
8211FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8212{
8213 IEM_MC_BEGIN(3, 3);
8214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8215 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8216 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8217 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8218 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8219 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8220
8221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8223
8224 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8225 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8226 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8227
8228 IEM_MC_PREPARE_FPU_USAGE();
8229 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8230 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8231 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8232 } IEM_MC_ELSE() {
8233 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8234 } IEM_MC_ENDIF();
8235 IEM_MC_ADVANCE_RIP_AND_FINISH();
8236
8237 IEM_MC_END();
8238}
8239
8240
/** Opcode 0xd8 !11/0 - fadd st0,m32r: ST0 = ST0 + m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8247
8248
/** Opcode 0xd8 !11/1 - fmul st0,m32r: ST0 = ST0 * m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8255
8256
/** Opcode 0xd8 !11/2 - fcom st0,m32r: compare ST0 with m32real, FSW only.
 *  Open-coded (rather than using a shared worker) because the FSW update
 *  variants here record the memory operand for FPU DP/DS reporting. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8288
8289
/** Opcode 0xd8 !11/3 - fcomp st0,m32r: same as fcom st0,m32r but pops the
 *  stack afterwards (_THEN_POP variants on both paths). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8321
8322
/** Opcode 0xd8 !11/4 - fsub st0,m32r: ST0 = ST0 - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8329
8330
/** Opcode 0xd8 !11/5 - fsubr st0,m32r: reversed subtraction with m32real. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8337
8338
/** Opcode 0xd8 !11/6 - fdiv st0,m32r: ST0 = ST0 / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8345
8346
/** Opcode 0xd8 !11/7 - fdivr st0,m32r: reversed division with m32real. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8353
8354
8355/**
8356 * @opcode 0xd8
8357 */
8358FNIEMOP_DEF(iemOp_EscF0)
8359{
8360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8361 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
8362
8363 if (IEM_IS_MODRM_REG_MODE(bRm))
8364 {
8365 switch (IEM_GET_MODRM_REG_8(bRm))
8366 {
8367 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
8368 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
8369 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
8370 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
8371 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
8372 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
8373 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
8374 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
8375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8376 }
8377 }
8378 else
8379 {
8380 switch (IEM_GET_MODRM_REG_8(bRm))
8381 {
8382 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
8383 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
8384 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
8385 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
8386 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
8387 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
8388 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
8389 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
8390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8391 }
8392 }
8393}
8394
8395
/** Opcode 0xd9 /0 mem32real
 * fld m32r: converts the 32-bit real to 80-bit and pushes it onto the FPU
 * stack.  ST(7) must be empty (free) or the push-overflow path is taken.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8426
8427
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r: stores ST0 to memory as a 32-bit real.  The destination is
 * mapped before the FPU state is touched; on stack underflow a negative
 * QNaN is written instead if the invalid-operation exception is masked
 * (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8461
8462
/** Opcode 0xd9 !11/3
 * fstp m32r: like fst m32r but pops the stack afterwards (all FSW/underflow
 * updates use the _THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8496
8497
/** Opcode 0xd9 !11/4
 * fldenv m14/28byte: loads the FPU environment from memory; the 14 vs 28
 * byte layout is selected by the effective operand size passed to the
 * C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); /* must be fetched after address decoding */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8514
8515
8516/** Opcode 0xd9 !11/5 */
8517FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8518{
8519 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8520 IEM_MC_BEGIN(1, 1);
8521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8522 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8525 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8526 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8527 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8528 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
8529 IEM_MC_END();
8530}
8531
8532
/** Opcode 0xd9 !11/6
 * fnstenv m14/m28byte: stores the FPU environment to memory without first
 * checking for pending FPU exceptions.
 * NOTE(review): the mnemonic token below says "fstenv" while the handler is
 * the no-wait fnstenv form - presumably intentional legacy stats naming;
 * confirm before changing as the token feeds generated identifiers. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); /* must be fetched after address decoding */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8549
8550
/** Opcode 0xd9 !11/7
 * fnstcw m2byte: stores the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8567
8568
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * fnop: FPU no-operation; still raises \#NM/\#MF checks and updates the
 * FPU opcode/IP tracking. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8585
8586
/** Opcode 0xd9 11/0 stN
 * fld stN: pushes a copy of ST(n) onto the FPU stack; push-underflow is
 * signalled if ST(n) is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8612
8613
/** Opcode 0xd9 11/3 stN
 * fxch stN: exchanges ST0 and ST(n).  The swap is done via the FPU-result
 * path (C1 set) so FOP/FPUIP tracking stays consistent; the empty-register
 * case is handed to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8643
8644
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * fstp st0,stN: copies ST0 into ST(n) and pops.  The iDstReg == 0 case is
 * special-cased as an effective 'ffreep st0' (just a pop with FSW update)
 * since copying ST0 onto itself is pointless. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8690
8691
8692/**
8693 * Common worker for FPU instructions working on ST0 and replaces it with the
8694 * result, i.e. unary operators.
8695 *
8696 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8697 */
8698FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
8699{
8700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701
8702 IEM_MC_BEGIN(2, 1);
8703 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8704 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8705 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8706
8707 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8708 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8709 IEM_MC_PREPARE_FPU_USAGE();
8710 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8711 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
8712 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8713 } IEM_MC_ELSE() {
8714 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8715 } IEM_MC_ENDIF();
8716 IEM_MC_ADVANCE_RIP_AND_FINISH();
8717
8718 IEM_MC_END();
8719}
8720
8721
/** Opcode 0xd9 0xe0 - fchs st0: changes the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8728
8729
/** Opcode 0xd9 0xe1 - fabs st0: replaces ST0 with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8736
8737
/** Opcode 0xd9 0xe4
 * ftst st0: compares ST0 against 0.0, setting the FSW condition codes only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8762
8763
/** Opcode 0xd9 0xe5
 * fxam st0: classifies ST0 into the FSW condition codes.  Unlike the other
 * workers this takes a plain register reference (no empty check) since fxam
 * also classifies an empty register. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8785
8786
8787/**
8788 * Common worker for FPU instructions pushing a constant onto the FPU stack.
8789 *
8790 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8791 */
8792FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
8793{
8794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8795
8796 IEM_MC_BEGIN(1, 1);
8797 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8798 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8799
8800 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8801 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8802 IEM_MC_PREPARE_FPU_USAGE();
8803 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
8804 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
8805 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
8806 } IEM_MC_ELSE() {
8807 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
8808 } IEM_MC_ENDIF();
8809 IEM_MC_ADVANCE_RIP_AND_FINISH();
8810
8811 IEM_MC_END();
8812}
8813
8814
/** Opcode 0xd9 0xe8 - fld1: pushes +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
8821
8822
/** Opcode 0xd9 0xe9 - fldl2t: pushes log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
8829
8830
/** Opcode 0xd9 0xea - fldl2e: pushes log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
8837
/** Opcode 0xd9 0xeb - fldpi: pushes pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
8844
8845
/** Opcode 0xd9 0xec - fldlg2: pushes log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
8852
/** Opcode 0xd9 0xed - fldln2: pushes ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
8859
8860
/** Opcode 0xd9 0xee - fldz: pushes +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8867
8868
/** Opcode 0xd9 0xf0.
 *
 * f2xm1 st0: ST0 = 2^ST0 - 1.  The instruction works on values +1.0 thru
 * -1.0, currently (the range on 287 & 8087 was +0.5 thru 0.0 according to
 * docs).  In addition it does appear to produce proper results for +Inf and
 * -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8882
8883
8884/**
8885 * Common worker for FPU instructions working on STn and ST0, storing the result
8886 * in STn, and popping the stack unless IE, DE or ZE was raised.
8887 *
8888 * @param bRm Mod R/M byte.
8889 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8890 */
8891FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8892{
8893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8894
8895 IEM_MC_BEGIN(3, 1);
8896 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8897 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8898 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8899 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8900
8901 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8902 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8903
8904 IEM_MC_PREPARE_FPU_USAGE();
8905 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
8906 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8907 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
8908 } IEM_MC_ELSE() {
8909 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
8910 } IEM_MC_ENDIF();
8911 IEM_MC_ADVANCE_RIP_AND_FINISH();
8912
8913 IEM_MC_END();
8914}
8915
8916
8917/** Opcode 0xd9 0xf1.
8917 * FYL2X - st1 = st1 * log2(st0), then pop. */
8918FNIEMOP_DEF(iemOp_fyl2x)
8919{
8920 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
8921 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
8922}
8923
8924
8925/**
8926 * Common worker for FPU instructions working on ST0 and having two outputs, one
8927 * replacing ST0 and one pushed onto the stack.
8928 *
8928 * Requires ST0 to be non-empty; otherwise the push-underflow path is taken.
8928 * (The a-impl receives an IEMFPURESULTTWO to hold both output values.)
8928 *
8929 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8930 */
8931FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
8932{
8933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8934
8935 IEM_MC_BEGIN(2, 1);
8936 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
8937 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
8938 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8939
8940 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8941 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8942 IEM_MC_PREPARE_FPU_USAGE();
8943 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8944 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
8945 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
8946 } IEM_MC_ELSE() {
8947 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
8948 } IEM_MC_ENDIF();
8949 IEM_MC_ADVANCE_RIP_AND_FINISH();
8950
8951 IEM_MC_END();
8952}
8953
8954
8955/** Opcode 0xd9 0xf2.
8955 * FPTAN - replaces ST0 and pushes a second result (two-output worker). */
8956FNIEMOP_DEF(iemOp_fptan)
8957{
8958 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
8959 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
8960}
8961
8962
8963/** Opcode 0xd9 0xf3.
8963 * FPATAN - operates on st1 and st0, stores into st1 and pops. */
8964FNIEMOP_DEF(iemOp_fpatan)
8965{
8966 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
8967 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
8968}
8969
8970
8971/** Opcode 0xd9 0xf4.
8971 * FXTRACT - replaces ST0 and pushes a second result (two-output worker). */
8972FNIEMOP_DEF(iemOp_fxtract)
8973{
8974 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
8975 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
8976}
8977
8978
8979/** Opcode 0xd9 0xf5.
8979 * FPREM1 - st0 = partial remainder of st0 / st1 (IEEE variant); no pop. */
8980FNIEMOP_DEF(iemOp_fprem1)
8981{
8982 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
8983 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
8984}
8985
8986
8987/** Opcode 0xd9 0xf6.
8987 * FDECSTP - decrement the FPU stack TOP pointer; no register contents change. */
8988FNIEMOP_DEF(iemOp_fdecstp)
8989{
8990 IEMOP_MNEMONIC(fdecstp, "fdecstp");
8991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8992 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
8993 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
8994 * FINCSTP and FDECSTP. */
8995
8996 IEM_MC_BEGIN(0,0);
8997
8998 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8999 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9000
9001 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9002 IEM_MC_FPU_STACK_DEC_TOP();
9003 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
9004
9005 IEM_MC_ADVANCE_RIP_AND_FINISH();
9006 IEM_MC_END();
9007}
9008
9009
9010/** Opcode 0xd9 0xf7.
9010 * FINCSTP - increment the FPU stack TOP pointer; no register contents change. */
9011FNIEMOP_DEF(iemOp_fincstp)
9012{
9013 IEMOP_MNEMONIC(fincstp, "fincstp");
9014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9015 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
9016 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
9017 * FINCSTP and FDECSTP. */
9018
9019 IEM_MC_BEGIN(0,0);
9020
9021 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9022 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9023
9024 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9025 IEM_MC_FPU_STACK_INC_TOP();
9026 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
9027
9028 IEM_MC_ADVANCE_RIP_AND_FINISH();
9029 IEM_MC_END();
9030}
9031
9032
9033/** Opcode 0xd9 0xf8.
9033 * FPREM - st0 = partial remainder of st0 / st1 (legacy variant); no pop. */
9034FNIEMOP_DEF(iemOp_fprem)
9035{
9036 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
9037 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
9038}
9039
9040
9041/** Opcode 0xd9 0xf9.
9041 * FYL2XP1 - st1 = st1 * log2(st0 + 1), then pop. */
9042FNIEMOP_DEF(iemOp_fyl2xp1)
9043{
9044 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
9045 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
9046}
9047
9048
9049/** Opcode 0xd9 0xfa.
9049 * FSQRT - st0 = sqrt(st0). */
9050FNIEMOP_DEF(iemOp_fsqrt)
9051{
9052 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
9053 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
9054}
9055
9056
9057/** Opcode 0xd9 0xfb.
9057 * FSINCOS - replaces ST0 and pushes a second result (two-output worker). */
9058FNIEMOP_DEF(iemOp_fsincos)
9059{
9060 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
9061 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
9062}
9063
9064
9065/** Opcode 0xd9 0xfc.
9065 * FRNDINT - round st0 to integer per the current rounding mode. */
9066FNIEMOP_DEF(iemOp_frndint)
9067{
9068 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
9069 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
9070}
9071
9072
9073/** Opcode 0xd9 0xfd.
9073 * FSCALE - scales st0 by st1; result stays in st0, no pop. */
9074FNIEMOP_DEF(iemOp_fscale)
9075{
9076 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
9077 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
9078}
9079
9080
9081/** Opcode 0xd9 0xfe.
9081 * FSIN - st0 = sin(st0). */
9082FNIEMOP_DEF(iemOp_fsin)
9083{
9084 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
9085 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
9086}
9087
9088
9089/** Opcode 0xd9 0xff.
9089 * FCOS - st0 = cos(st0). */
9090FNIEMOP_DEF(iemOp_fcos)
9091{
9092 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
9093 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
9094}
9095
9096
9097/** Used by iemOp_EscF1.
9097 * Dispatch table for 0xd9 register-form encodings 0xe0 thru 0xff (reg=4..7);
9097 * reserved slots are filled with iemOp_Invalid, so the table has no NULLs. */
9098IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
9099{
9100 /* 0xe0 */ iemOp_fchs,
9101 /* 0xe1 */ iemOp_fabs,
9102 /* 0xe2 */ iemOp_Invalid,
9103 /* 0xe3 */ iemOp_Invalid,
9104 /* 0xe4 */ iemOp_ftst,
9105 /* 0xe5 */ iemOp_fxam,
9106 /* 0xe6 */ iemOp_Invalid,
9107 /* 0xe7 */ iemOp_Invalid,
9108 /* 0xe8 */ iemOp_fld1,
9109 /* 0xe9 */ iemOp_fldl2t,
9110 /* 0xea */ iemOp_fldl2e,
9111 /* 0xeb */ iemOp_fldpi,
9112 /* 0xec */ iemOp_fldlg2,
9113 /* 0xed */ iemOp_fldln2,
9114 /* 0xee */ iemOp_fldz,
9115 /* 0xef */ iemOp_Invalid,
9116 /* 0xf0 */ iemOp_f2xm1,
9117 /* 0xf1 */ iemOp_fyl2x,
9118 /* 0xf2 */ iemOp_fptan,
9119 /* 0xf3 */ iemOp_fpatan,
9120 /* 0xf4 */ iemOp_fxtract,
9121 /* 0xf5 */ iemOp_fprem1,
9122 /* 0xf6 */ iemOp_fdecstp,
9123 /* 0xf7 */ iemOp_fincstp,
9124 /* 0xf8 */ iemOp_fprem,
9125 /* 0xf9 */ iemOp_fyl2xp1,
9126 /* 0xfa */ iemOp_fsqrt,
9127 /* 0xfb */ iemOp_fsincos,
9128 /* 0xfc */ iemOp_frndint,
9129 /* 0xfd */ iemOp_fscale,
9130 /* 0xfe */ iemOp_fsin,
9131 /* 0xff */ iemOp_fcos
9132};
9133
9134
9135/**
9136 * @opcode 0xd9
9136 *
9136 * Decoder for escape opcode 0xd9: dispatches on the ModR/M byte, either to the
9136 * register-form handlers (mod=3) or to the memory-form handlers (mod!=3).
9137 */
9138FNIEMOP_DEF(iemOp_EscF1)
9139{
9140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9141 /* FOP: low byte = ModR/M, high byte = low 3 bits of the escape opcode. */
9141 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9142
9143 if (IEM_IS_MODRM_REG_MODE(bRm))
9144 {
9145 switch (IEM_GET_MODRM_REG_8(bRm))
9146 {
9147 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9148 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9149 case 2:
9150 if (bRm == 0xd0)
9151 return FNIEMOP_CALL(iemOp_fnop);
9152 IEMOP_RAISE_INVALID_OPCODE_RET();
9153 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9154 case 4:
9155 case 5:
9156 case 6:
9157 case 7:
9158 /* reg=4..7 in register mode means bRm is 0xe0..0xff, i.e. in-range. */
9158 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9159 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9161 }
9162 }
9163 else
9164 {
9165 switch (IEM_GET_MODRM_REG_8(bRm))
9166 {
9167 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9168 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9169 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9170 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9171 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9172 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9173 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9174 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9176 }
9177 }
9178}
9179
9180
9181/** Opcode 0xda 11/0.
9181 * FCMOVB - copy ST(i) into ST0 when EFLAGS.CF is set (below). */
9182FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
9183{
9184 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
9185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9186
9187 IEM_MC_BEGIN(0, 1);
9188 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9189
9190 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9191 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9192
9193 IEM_MC_PREPARE_FPU_USAGE();
9194 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9195 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9196 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9197 } IEM_MC_ENDIF();
9198 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9199 } IEM_MC_ELSE() {
9200 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9201 } IEM_MC_ENDIF();
9202 IEM_MC_ADVANCE_RIP_AND_FINISH();
9203
9204 IEM_MC_END();
9205}
9206
9207
9208/** Opcode 0xda 11/1.
9208 * FCMOVE - copy ST(i) into ST0 when EFLAGS.ZF is set (equal). */
9209FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
9210{
9211 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
9212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9213
9214 IEM_MC_BEGIN(0, 1);
9215 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9216
9217 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9218 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9219
9220 IEM_MC_PREPARE_FPU_USAGE();
9221 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9222 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9223 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9224 } IEM_MC_ENDIF();
9225 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9226 } IEM_MC_ELSE() {
9227 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9228 } IEM_MC_ENDIF();
9229 IEM_MC_ADVANCE_RIP_AND_FINISH();
9230
9231 IEM_MC_END();
9232}
9233
9234
9235/** Opcode 0xda 11/2.
9235 * FCMOVBE - copy ST(i) into ST0 when EFLAGS.CF or ZF is set (below-or-equal). */
9236FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
9237{
9238 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
9239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9240
9241 IEM_MC_BEGIN(0, 1);
9242 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9243
9244 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9245 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9246
9247 IEM_MC_PREPARE_FPU_USAGE();
9248 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9249 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9250 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9251 } IEM_MC_ENDIF();
9252 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9253 } IEM_MC_ELSE() {
9254 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9255 } IEM_MC_ENDIF();
9256 IEM_MC_ADVANCE_RIP_AND_FINISH();
9257
9258 IEM_MC_END();
9259}
9260
9261
9262/** Opcode 0xda 11/3.
9262 * FCMOVU - copy ST(i) into ST0 when EFLAGS.PF is set (unordered). */
9263FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
9264{
9265 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
9266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9267
9268 IEM_MC_BEGIN(0, 1);
9269 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9270
9271 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9272 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9273
9274 IEM_MC_PREPARE_FPU_USAGE();
9275 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9276 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9277 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9278 } IEM_MC_ENDIF();
9279 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9280 } IEM_MC_ELSE() {
9281 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9282 } IEM_MC_ENDIF();
9283 IEM_MC_ADVANCE_RIP_AND_FINISH();
9284
9285 IEM_MC_END();
9286}
9287
9288
9289/**
9290 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9291 * flags, and popping twice when done.
9292 *
9292 * The a-impl returns an FSW value; no FPU register is written. Both ST0 and
9292 * ST1 must be non-empty, otherwise the underflow path (still popping twice)
9292 * is taken.
9292 *
9293 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9294 */
9295FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9296{
9297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9298
9299 IEM_MC_BEGIN(3, 1);
9300 IEM_MC_LOCAL(uint16_t, u16Fsw);
9301 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9302 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9303 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9304
9305 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9306 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9307
9308 IEM_MC_PREPARE_FPU_USAGE();
9309 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9310 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9311 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9312 } IEM_MC_ELSE() {
9313 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
9314 } IEM_MC_ENDIF();
9315 IEM_MC_ADVANCE_RIP_AND_FINISH();
9316
9317 IEM_MC_END();
9318}
9319
9320
9321/** Opcode 0xda 0xe9.
9321 * FUCOMPP - unordered compare of st0 with st1, then pop both. */
9322FNIEMOP_DEF(iemOp_fucompp)
9323{
9324 IEMOP_MNEMONIC(fucompp, "fucompp");
9325 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
9326}
9327
9328
9329/**
9330 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9331 * the result in ST0.
9332 *
9332 * The effective address is decoded first, then the int32 operand is fetched;
9332 * the a-impl only runs when ST0 is non-empty, otherwise the underflow path
9332 * is taken.
9332 *
9333 * @param bRm Mod R/M byte.
9334 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9335 */
9336FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9337{
9338 IEM_MC_BEGIN(3, 3);
9339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9340 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9341 IEM_MC_LOCAL(int32_t, i32Val2);
9342 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9343 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9344 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9345
9346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9348
9349 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9350 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9351 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9352
9353 IEM_MC_PREPARE_FPU_USAGE();
9354 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9355 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
9356 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9357 } IEM_MC_ELSE() {
9358 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9359 } IEM_MC_ENDIF();
9360 IEM_MC_ADVANCE_RIP_AND_FINISH();
9361
9362 IEM_MC_END();
9363}
9364
9365
9366/** Opcode 0xda !11/0.
9366 * FIADD m32i - st0 = st0 + (int32 memory operand). */
9367FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
9368{
9369 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
9370 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
9371}
9372
9373
9374/** Opcode 0xda !11/1.
9374 * FIMUL m32i - st0 = st0 * (int32 memory operand). */
9375FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
9376{
9377 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
9378 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
9379}
9380
9381
9382/** Opcode 0xda !11/2.
9382 * FICOM m32i - compare st0 with an int32 memory operand; flags only, no pop. */
9383FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
9384{
9385 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
9386
9387 IEM_MC_BEGIN(3, 3);
9388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9389 IEM_MC_LOCAL(uint16_t, u16Fsw);
9390 IEM_MC_LOCAL(int32_t, i32Val2);
9391 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9392 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9393 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9394
9395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9397
9398 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9399 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9400 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9401
9402 IEM_MC_PREPARE_FPU_USAGE();
9403 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9404 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
9405 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9406 } IEM_MC_ELSE() {
9407 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9408 } IEM_MC_ENDIF();
9409 IEM_MC_ADVANCE_RIP_AND_FINISH();
9410
9411 IEM_MC_END();
9412}
9413
9414
9415/** Opcode 0xda !11/3.
9415 * FICOMP m32i - same as FICOM m32i but pops the stack afterwards (uses the
9415 * same iemAImpl_ficom_r80_by_i32 worker, only the FSW update differs). */
9416FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
9417{
9418 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
9419
9420 IEM_MC_BEGIN(3, 3);
9421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9422 IEM_MC_LOCAL(uint16_t, u16Fsw);
9423 IEM_MC_LOCAL(int32_t, i32Val2);
9424 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9425 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9426 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9427
9428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9430
9431 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9432 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9433 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9434
9435 IEM_MC_PREPARE_FPU_USAGE();
9436 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9437 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
9438 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9439 } IEM_MC_ELSE() {
9440 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9441 } IEM_MC_ENDIF();
9442 IEM_MC_ADVANCE_RIP_AND_FINISH();
9443
9444 IEM_MC_END();
9445}
9446
9447
9448/** Opcode 0xda !11/4.
9448 * FISUB m32i - st0 = st0 - (int32 memory operand). */
9449FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
9450{
9451 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
9452 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
9453}
9454
9455
9456/** Opcode 0xda !11/5.
9456 * FISUBR m32i - st0 = (int32 memory operand) - st0 (reversed subtract). */
9457FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
9458{
9459 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
9460 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
9461}
9462
9463
9464/** Opcode 0xda !11/6.
9464 * FIDIV m32i - st0 = st0 / (int32 memory operand). */
9465FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
9466{
9467 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
9468 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
9469}
9470
9471
9472/** Opcode 0xda !11/7.
9472 * FIDIVR m32i - st0 = (int32 memory operand) / st0 (reversed divide). */
9473FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
9474{
9475 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
9476 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
9477}
9478
9479
9480/**
9481 * @opcode 0xda
9481 *
9481 * Decoder for escape opcode 0xda: FCMOVcc and FUCOMPP in register form,
9481 * int32 memory-operand arithmetic/compares otherwise.
9482 */
9483FNIEMOP_DEF(iemOp_EscF2)
9484{
9485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9486 /* FOP: low byte = ModR/M, high byte = low 3 bits of the escape opcode. */
9486 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
9487 if (IEM_IS_MODRM_REG_MODE(bRm))
9488 {
9489 switch (IEM_GET_MODRM_REG_8(bRm))
9490 {
9491 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
9492 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
9493 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
9494 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
9495 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
9496 case 5:
9497 if (bRm == 0xe9)
9498 return FNIEMOP_CALL(iemOp_fucompp);
9499 IEMOP_RAISE_INVALID_OPCODE_RET();
9500 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9501 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
9502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9503 }
9504 }
9505 else
9506 {
9507 switch (IEM_GET_MODRM_REG_8(bRm))
9508 {
9509 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
9510 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
9511 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
9512 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
9513 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
9514 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
9515 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
9516 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
9517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9518 }
9519 }
9520}
9521
9522
9523/** Opcode 0xdb !11/0.
9523 * FILD m32i - convert an int32 memory operand to r80 and push it. Requires
9523 * the register that will become the new top (ST7 relative to current TOP) to
9523 * be empty, else the push-overflow path is taken. */
9524FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
9525{
9526 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
9527
9528 IEM_MC_BEGIN(2, 3);
9529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9530 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9531 IEM_MC_LOCAL(int32_t, i32Val);
9532 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9533 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
9534
9535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9537
9538 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9539 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9540 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9541
9542 IEM_MC_PREPARE_FPU_USAGE();
9543 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9544 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
9545 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9546 } IEM_MC_ELSE() {
9547 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9548 } IEM_MC_ENDIF();
9549 IEM_MC_ADVANCE_RIP_AND_FINISH();
9550
9551 IEM_MC_END();
9552}
9553
9554
9555/** Opcode 0xdb !11/1.
9555 * FISTTP m32i - store st0 to memory as int32 with truncation, then pop.
9555 * On stack underflow with IM masked, the integer indefinite (INT32_MIN) is
9555 * written instead. */
9556FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
9557{
9558 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
9559 IEM_MC_BEGIN(3, 2);
9560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9561 IEM_MC_LOCAL(uint16_t, u16Fsw);
9562 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9563 IEM_MC_ARG(int32_t *, pi32Dst, 1);
9564 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9565
9566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9568 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9569 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9570
9571 /* Map the destination for write before touching FPU state. */
9571 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9572 IEM_MC_PREPARE_FPU_USAGE();
9573 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9574 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
9575 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
9576 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9577 } IEM_MC_ELSE() {
9578 IEM_MC_IF_FCW_IM() {
9579 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
9580 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
9581 } IEM_MC_ENDIF();
9582 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9583 } IEM_MC_ENDIF();
9584 IEM_MC_ADVANCE_RIP_AND_FINISH();
9585
9586 IEM_MC_END();
9587}
9588
9589
9590/** Opcode 0xdb !11/2.
9590 * FIST m32i - store st0 to memory as int32 (current rounding mode), no pop.
9590 * On stack underflow with IM masked, the integer indefinite (INT32_MIN) is
9590 * written instead. */
9591FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
9592{
9593 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
9594 IEM_MC_BEGIN(3, 2);
9595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9596 IEM_MC_LOCAL(uint16_t, u16Fsw);
9597 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9598 IEM_MC_ARG(int32_t *, pi32Dst, 1);
9599 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9600
9601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9603 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9604 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9605
9606 /* Map the destination for write before touching FPU state. */
9606 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9607 IEM_MC_PREPARE_FPU_USAGE();
9608 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9609 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
9610 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
9611 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9612 } IEM_MC_ELSE() {
9613 IEM_MC_IF_FCW_IM() {
9614 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
9615 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
9616 } IEM_MC_ENDIF();
9617 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9618 } IEM_MC_ENDIF();
9619 IEM_MC_ADVANCE_RIP_AND_FINISH();
9620
9621 IEM_MC_END();
9622}
9623
9624
9625/** Opcode 0xdb !11/3.
9625 * FISTP m32i - same as FIST m32i but pops the stack afterwards (shares the
9625 * iemAImpl_fist_r80_to_i32 worker; only the FSW update differs). */
9626FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
9627{
9628 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
9629 IEM_MC_BEGIN(3, 2);
9630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9631 IEM_MC_LOCAL(uint16_t, u16Fsw);
9632 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9633 IEM_MC_ARG(int32_t *, pi32Dst, 1);
9634 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9635
9636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9638 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9639 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9640
9641 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9642 IEM_MC_PREPARE_FPU_USAGE();
9643 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9644 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
9645 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
9646 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9647 } IEM_MC_ELSE() {
9648 IEM_MC_IF_FCW_IM() {
9649 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
9650 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
9651 } IEM_MC_ENDIF();
9652 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9653 } IEM_MC_ENDIF();
9654 IEM_MC_ADVANCE_RIP_AND_FINISH();
9655
9656 IEM_MC_END();
9657}
9658
9659
9660/** Opcode 0xdb !11/5.
9660 * FLD m80r - load an 80-bit real from memory and push it onto the stack.
9660 * Requires the register that will become the new top to be empty, else the
9660 * push-overflow path is taken. */
9661FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
9662{
9663 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
9664
9665 IEM_MC_BEGIN(2, 3);
9666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9667 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9668 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
9669 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9670 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
9671
9672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9674
9675 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9676 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9677 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9678
9679 IEM_MC_PREPARE_FPU_USAGE();
9680 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9681 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
9682 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9683 } IEM_MC_ELSE() {
9684 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9685 } IEM_MC_ENDIF();
9686 IEM_MC_ADVANCE_RIP_AND_FINISH();
9687
9688 IEM_MC_END();
9689}
9690
9691
9692/** Opcode 0xdb !11/7.
9692 * FSTP m80r - store st0 to memory as an 80-bit real, then pop. On stack
9692 * underflow with IM masked, the negative QNaN indefinite is written instead. */
9693FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
9694{
9695 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
9696 IEM_MC_BEGIN(3, 2);
9697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9698 IEM_MC_LOCAL(uint16_t, u16Fsw);
9699 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9700 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
9701 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9702
9703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9705 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9706 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9707
9708 IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
9709 IEM_MC_PREPARE_FPU_USAGE();
9710 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9711 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
9712 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
9713 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9714 } IEM_MC_ELSE() {
9715 IEM_MC_IF_FCW_IM() {
9716 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
9717 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
9718 } IEM_MC_ENDIF();
9719 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9720 } IEM_MC_ENDIF();
9721 IEM_MC_ADVANCE_RIP_AND_FINISH();
9722
9723 IEM_MC_END();
9724}
9725
9726
9727/** Opcode 0xdb 11/0.
9727 * FCMOVNB - copy ST(i) into ST0 when EFLAGS.CF is clear (not below). */
9728FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
9729{
9730 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
9731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9732
9733 IEM_MC_BEGIN(0, 1);
9734 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9735
9736 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9737 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9738
9739 IEM_MC_PREPARE_FPU_USAGE();
9740 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9741 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
9742 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9743 } IEM_MC_ENDIF();
9744 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9745 } IEM_MC_ELSE() {
9746 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9747 } IEM_MC_ENDIF();
9748 IEM_MC_ADVANCE_RIP_AND_FINISH();
9749
9750 IEM_MC_END();
9751}
9752
9753
9754/** Opcode 0xdb 11/1.
9754 * FCMOVNE - copy ST(i) into ST0 when EFLAGS.ZF is clear (not equal). */
9755FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
9756{
9757 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
9758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9759
9760 IEM_MC_BEGIN(0, 1);
9761 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9762
9763 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9764 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9765
9766 IEM_MC_PREPARE_FPU_USAGE();
9767 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9768 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9769 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9770 } IEM_MC_ENDIF();
9771 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9772 } IEM_MC_ELSE() {
9773 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9774 } IEM_MC_ENDIF();
9775 IEM_MC_ADVANCE_RIP_AND_FINISH();
9776
9777 IEM_MC_END();
9778}
9779
9780
9781/** Opcode 0xdb 11/2.
9781 * FCMOVNBE - copy ST(i) into ST0 when both EFLAGS.CF and ZF are clear
9781 * (not below-or-equal). */
9782FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
9783{
9784 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
9785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9786
9787 IEM_MC_BEGIN(0, 1);
9788 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9789
9790 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9791 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9792
9793 IEM_MC_PREPARE_FPU_USAGE();
9794 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9795 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9796 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9797 } IEM_MC_ENDIF();
9798 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9799 } IEM_MC_ELSE() {
9800 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9801 } IEM_MC_ENDIF();
9802 IEM_MC_ADVANCE_RIP_AND_FINISH();
9803
9804 IEM_MC_END();
9805}
9806
9807
9808/** Opcode 0xdb 11/3.
9808 * FCMOVNU - copy ST(i) into ST0 when EFLAGS.PF is clear (not unordered).
9808 * NOTE(review): the identifier/mnemonic use a double 'n' ("fcmovnnu") which
9808 * looks like a typo for fcmovnu; left as-is since the name feeds the stats
9808 * machinery and the string is emitted at runtime - confirm before renaming. */
9809FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
9810{
9811 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
9812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9813
9814 IEM_MC_BEGIN(0, 1);
9815 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
9816
9817 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9818 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9819
9820 IEM_MC_PREPARE_FPU_USAGE();
9821 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
9822 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
9823 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
9824 } IEM_MC_ENDIF();
9825 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9826 } IEM_MC_ELSE() {
9827 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9828 } IEM_MC_ENDIF();
9829 IEM_MC_ADVANCE_RIP_AND_FINISH();
9830
9831 IEM_MC_END();
9832}
9833
9834
9835/** Opcode 0xdb 0xe0.
9835 * FNENI - 8087 interrupt enable; treated as a no-op here (only the device
9835 * -not-available check is performed). */
9836FNIEMOP_DEF(iemOp_fneni)
9837{
9838 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
9839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9840 IEM_MC_BEGIN(0,0);
9841 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9842 IEM_MC_ADVANCE_RIP_AND_FINISH();
9843 IEM_MC_END();
9844}
9845
9846
9847/** Opcode 0xdb 0xe1.
9847 * FNDISI - 8087 interrupt disable; treated as a no-op here (only the device
9847 * -not-available check is performed). */
9848FNIEMOP_DEF(iemOp_fndisi)
9849{
9850 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
9851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9852 IEM_MC_BEGIN(0,0);
9853 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9854 IEM_MC_ADVANCE_RIP_AND_FINISH();
9855 IEM_MC_END();
9856}
9857
9858
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear FPU exception flags (FSW busy/ES/exception bits) without
 * checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();  /* we modify FSW below */
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9872
9873
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * fCheckXcpts=false (the "no-wait" form skips pending-exception checks). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
9881
9882
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode" relic; decoded but ignored on
 * later CPUs (no-op apart from the \#NM check). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9893
9894
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "return to real mode" relic.  The ignore-as-no-op code
 * is compiled out; current behavior is to raise \#UD as newer CPUs do. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
9910
9911
9912/** Opcode 0xdb 11/5. */
9913FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
9914{
9915 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
9916 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
9917 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
9918 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
9919}
9920
9921
/** Opcode 0xdb 11/6.
 * FCOMI st0,stN - ordered compare of ST(0) with ST(i), storing the result in
 * EFLAGS.  Shares iemCImpl_fcomi_fucomi with FUCOMI; fPop=false, FPU opcode
 * word OR'ed into the same argument. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
9930
9931
9932/**
9933 * @opcode 0xdb
9934 */
9935FNIEMOP_DEF(iemOp_EscF3)
9936{
9937 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9938 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9939 if (IEM_IS_MODRM_REG_MODE(bRm))
9940 {
9941 switch (IEM_GET_MODRM_REG_8(bRm))
9942 {
9943 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9944 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9945 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9946 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9947 case 4:
9948 switch (bRm)
9949 {
9950 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9951 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9952 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9953 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9954 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9955 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9956 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
9957 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
9958 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9959 }
9960 break;
9961 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9962 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9963 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
9964 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9965 }
9966 }
9967 else
9968 {
9969 switch (IEM_GET_MODRM_REG_8(bRm))
9970 {
9971 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9972 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9973 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9974 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9975 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
9976 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9977 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9978 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9979 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9980 }
9981 }
9982}
9983
9984
9985/**
9986 * Common worker for FPU instructions working on STn and ST0, and storing the
9987 * result in STn unless IE, DE or ZE was raised.
9988 *
9989 * @param bRm Mod R/M byte.
9990 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9991 */
9992FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9993{
9994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9995
9996 IEM_MC_BEGIN(3, 1);
9997 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9998 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9999 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10000 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10001
10002 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10003 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10004
10005 IEM_MC_PREPARE_FPU_USAGE();
10006 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10007 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10008 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10009 } IEM_MC_ELSE() {
10010 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10011 } IEM_MC_ENDIF();
10012 IEM_MC_ADVANCE_RIP_AND_FINISH();
10013
10014 IEM_MC_END();
10015}
10016
10017
/** Opcode 0xdc 11/0.
 * FADD stN,st0 - adds ST(0) to ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10024
10025
/** Opcode 0xdc 11/1.
 * FMUL stN,st0 - multiplies ST(i) by ST(0), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10032
10033
/** Opcode 0xdc 11/4.
 * FSUBR stN,st0 - reverse subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10040
10041
/** Opcode 0xdc 11/5.
 * FSUB stN,st0 - subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10048
10049
/** Opcode 0xdc 11/6.
 * FDIVR stN,st0 - reverse divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10056
10057
/** Opcode 0xdc 11/7.
 * FDIV stN,st0 - divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10064
10065
10066/**
10067 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10068 * memory operand, and storing the result in ST0.
10069 *
10070 * @param bRm Mod R/M byte.
10071 * @param pfnImpl Pointer to the instruction implementation (assembly).
10072 */
10073FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10074{
10075 IEM_MC_BEGIN(3, 3);
10076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10077 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10078 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10079 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10080 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10081 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10082
10083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10085 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10086 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10087
10088 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10089 IEM_MC_PREPARE_FPU_USAGE();
10090 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10091 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10092 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10093 } IEM_MC_ELSE() {
10094 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10095 } IEM_MC_ENDIF();
10096 IEM_MC_ADVANCE_RIP_AND_FINISH();
10097
10098 IEM_MC_END();
10099}
10100
10101
/** Opcode 0xdc !11/0.
 * FADD m64real - adds the 64-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10108
10109
/** Opcode 0xdc !11/1.
 * FMUL m64real - multiplies ST(0) by the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10116
10117
/** Opcode 0xdc !11/2.
 * FCOM st0,m64real - compares ST(0) with the 64-bit real memory operand,
 * updating the FSW condition codes; no register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        /* Only the FSW is updated (compare result); no FPU register store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10149
10150
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64real - same as FCOM m64real but pops ST(0) afterwards
 * (note the *_THEN_POP variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow path also pops, matching hardware behavior for FCOMP. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10182
10183
/** Opcode 0xdc !11/4.
 * FSUB m64real - subtracts the 64-bit real memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10190
10191
/** Opcode 0xdc !11/5.
 * FSUBR m64real - reverse subtract using the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10198
10199
/** Opcode 0xdc !11/6.
 * FDIV m64real - divides ST(0) by the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10206
10207
/** Opcode 0xdc !11/7.
 * FDIVR m64real - reverse divide using the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10214
10215
10216/**
10217 * @opcode 0xdc
10218 */
10219FNIEMOP_DEF(iemOp_EscF4)
10220{
10221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10222 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10223 if (IEM_IS_MODRM_REG_MODE(bRm))
10224 {
10225 switch (IEM_GET_MODRM_REG_8(bRm))
10226 {
10227 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10228 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10229 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10230 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10231 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10232 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10233 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10234 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10236 }
10237 }
10238 else
10239 {
10240 switch (IEM_GET_MODRM_REG_8(bRm))
10241 {
10242 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10243 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10244 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10245 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10246 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10247 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10248 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10249 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10251 }
10252 }
10253}
10254
10255
/** Opcode 0xdd !11/0.
 * FLD m64real - pushes the 64-bit real memory operand onto the FPU stack,
 * converting it to 80-bit extended precision.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push targets ST(7) relative to the current top; it must be empty
       or we have a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10286
10287
/** Opcode 0xdd !11/1 (header previously said !11/0; dispatched from
 * iemOp_EscF5 reg=1).
 * FISTTP m64int - stores ST(0) as a 64-bit integer with truncation and pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,       2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if the invalid-op exception is masked, store the
           integer-indefinite value; either way report underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10321
10322
/** Opcode 0xdd !11/2 (header previously said !11/0; dispatched from
 * iemOp_EscF5 reg=2).
 * FST m64real - stores ST(0) to memory as a 64-bit real; does not pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,       2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked store the negative QNaN (real
           indefinite), then report underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10356
10357
10358
10359
/** Opcode 0xdd !11/3 (header previously said !11/0; dispatched from
 * iemOp_EscF5 reg=3).
 * FSTP m64real - stores ST(0) to memory as a 64-bit real and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,       2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Same as FST m64r, but the pop is folded into the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10393
10394
/** Opcode 0xdd !11/4 (header previously said !11/0; dispatched from
 * iemOp_EscF5 reg=4).
 * FRSTOR m94/108byte - restores the full FPU state image; deferred to the
 * C implementation.  Note: iEffSeg must be read after address decoding. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10411
10412
/** Opcode 0xdd !11/6 (header previously said !11/0; dispatched from
 * iemOp_EscF5 reg=6).
 * FNSAVE m94/108byte - saves the full FPU state image then reinitializes
 * the FPU; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10429
/** Opcode 0xdd !11/7 (header previously said !11/0; dispatched from
 * iemOp_EscF5 reg=7).
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10453
10454
/** Opcode 0xdd 11/0.
 * FFREE stN - marks ST(i) empty in the tag word without altering the top
 * of stack. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10475
10476
/** Opcode 0xdd 11/2 (header previously said 11/1; dispatched from
 * iemOp_EscF5 reg=2).
 * FST st0,stN - copies ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap ST(0) in a result with a zero FSW so the store helper can
           write it into ST(i) and update FOP/FIP. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10500
10501
/** Opcode 0xdd 11/4 (header previously said 11/3; dispatched from
 * iemOp_EscF5 reg=4).
 * FUCOM st0,stN - unordered compare, FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
10508
10509
/** Opcode 0xdd 11/5 (header previously said 11/4; dispatched from
 * iemOp_EscF5 reg=5).
 * FUCOMP st0,stN - unordered compare then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10516
10517
10518/**
10519 * @opcode 0xdd
10520 */
10521FNIEMOP_DEF(iemOp_EscF5)
10522{
10523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10524 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
10525 if (IEM_IS_MODRM_REG_MODE(bRm))
10526 {
10527 switch (IEM_GET_MODRM_REG_8(bRm))
10528 {
10529 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
10530 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
10531 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
10532 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
10533 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
10534 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
10535 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10536 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10538 }
10539 }
10540 else
10541 {
10542 switch (IEM_GET_MODRM_REG_8(bRm))
10543 {
10544 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
10545 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
10546 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
10547 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
10548 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
10549 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
10550 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
10551 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
10552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10553 }
10554 }
10555}
10556
10557
/** Opcode 0xde 11/0.
 * FADDP stN,st0 - add ST(0) to ST(i), store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
10564
10565
/** Opcode 0xde 11/1 (header previously said 11/0; dispatched from
 * iemOp_EscF6 reg=1).
 * FMULP stN,st0 - multiply ST(i) by ST(0), store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
10572
10573
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10580
10581
/** Opcode 0xde 11/4.
 * FSUBRP stN,st0 - reverse subtract into ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
10588
10589
/** Opcode 0xde 11/5.
 * FSUBP stN,st0 - subtract into ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
10596
10597
/** Opcode 0xde 11/6.
 * FDIVRP stN,st0 - reverse divide into ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
10604
10605
/** Opcode 0xde 11/7.
 * FDIVP stN,st0 - divide into ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10612
10613
10614/**
10615 * Common worker for FPU instructions working on ST0 and an m16i, and storing
10616 * the result in ST0.
10617 *
10618 * @param bRm Mod R/M byte.
10619 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10620 */
10621FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
10622{
10623 IEM_MC_BEGIN(3, 3);
10624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10625 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10626 IEM_MC_LOCAL(int16_t, i16Val2);
10627 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10628 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10629 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
10630
10631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10633
10634 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10635 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10636 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10637
10638 IEM_MC_PREPARE_FPU_USAGE();
10639 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10640 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
10641 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10642 } IEM_MC_ELSE() {
10643 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10644 } IEM_MC_ENDIF();
10645 IEM_MC_ADVANCE_RIP_AND_FINISH();
10646
10647 IEM_MC_END();
10648}
10649
10650
/** Opcode 0xde !11/0.
 * FIADD m16int - adds the 16-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10657
10658
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiplies ST(0) by the 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10665
10666
/** Opcode 0xde !11/2.
 * FICOM st0,m16int - compares ST(0) with the 16-bit integer memory operand,
 * updating the FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_LOCAL(int16_t,                 i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10698
10699
/** Opcode 0xde !11/3.
 * FICOMP st0,m16int - same as FICOM m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_LOCAL(int16_t,                 i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow path also pops, matching FICOMP semantics. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10731
10732
/** Opcode 0xde !11/4.
 * FISUB m16int - subtracts the 16-bit integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10739
10740
/** Opcode 0xde !11/5.
 * FISUBR m16int - reverse subtract using the 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10747
10748
/** Opcode 0xde !11/6.
 * FIDIV m16int - divides ST(0) by the 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10755
10756
10757/** Opcode 0xde !11/7. */
10758FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
10759{
10760 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
10761 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
10762}
10763
10764
10765/**
10766 * @opcode 0xde
10767 */
10768FNIEMOP_DEF(iemOp_EscF6)
10769{
10770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10771 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
10772 if (IEM_IS_MODRM_REG_MODE(bRm))
10773 {
10774 switch (IEM_GET_MODRM_REG_8(bRm))
10775 {
10776 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
10777 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
10778 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10779 case 3: if (bRm == 0xd9)
10780 return FNIEMOP_CALL(iemOp_fcompp);
10781 IEMOP_RAISE_INVALID_OPCODE_RET();
10782 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
10783 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
10784 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
10785 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
10786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10787 }
10788 }
10789 else
10790 {
10791 switch (IEM_GET_MODRM_REG_8(bRm))
10792 {
10793 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
10794 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
10795 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
10796 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
10797 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
10798 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
10799 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
10800 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
10801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10802 }
10803 }
10804}
10805
10806
/** Opcode 0xdf 11/0.
 * FFREEP ST(i): undocumented instruction, assumed to work like FFREE
 * followed by FINCSTP — i.e. free the tagged register and pop (increment
 * TOP) without storing anything. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));  /* tag ST(i) as empty */
    IEM_MC_FPU_STACK_INC_TOP();                      /* pop without store */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10827
10828
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX.  The no-wait form, so no
 * pending-FPU-exception check is performed (only device-not-available). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10844
10845
/** Opcode 0xdf 11/5.
 * FUCOMIP ST(0),ST(i): unordered compare into EFLAGS, then pop.  Deferred
 * to the shared fcomi/fucomi C implementation; bit 31 of the last argument
 * requests the pop. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}


/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare into EFLAGS, then pop.  Same C
 * implementation and pop flag as FUCOMIP above. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10864
10865
/** Opcode 0xdf !11/0.
 * FILD m16i: convert a signed 16-bit integer from memory to an 80-bit real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val,    i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes ST(0) after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10896
10897
/** Opcode 0xdf !11/1.
 * FISTTP m16i: store ST(0) to memory as a signed 16-bit integer using
 * truncation, then pop.  The destination is mapped for write up front so
 * the masked-underflow path can still store the integer-indefinite value. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: if the invalid-operation exception is masked, store
           the integer indefinite value; then raise stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10931
10932
/** Opcode 0xdf !11/2.
 * FIST m16i: store ST(0) to memory as a signed 16-bit integer (rounding per
 * the worker), without popping.  Destination is mapped for write up front so
 * the masked-underflow path can store the integer-indefinite value. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           raise stack underflow (no pop for FIST). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10966
10967
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) to memory as a signed 16-bit integer, then pop.
 * Same structure as FIST m16i above, but the FSW update / underflow paths
 * pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           raise stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11001
11002
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: load an 80-bit packed BCD value from memory, convert it to
 * an 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,             d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,   pd80Val,    d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes ST(0) after the push; it must be free or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11033
11034
/** Opcode 0xdf !11/5.
 * FILD m64i: convert a signed 64-bit integer from memory to an 80-bit real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int64_t,                   i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *,   pi64Val,    i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes ST(0) after the push; it must be free or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11065
11066
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: store ST(0) to memory as 80-bit packed BCD, then pop.
 * Uses the extended mapping helper to pass an explicit alignment
 * specification (7) for the 10-byte destination. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,              pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store the BCD indefinite value if #IA is masked,
           then raise stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11100
11101
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) to memory as a signed 64-bit integer, then pop.
 * Same shape as FISTP m16i, with 64-bit destination and worker. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           raise stack underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11135
11136
11137/**
11138 * @opcode 0xdf
11139 */
11140FNIEMOP_DEF(iemOp_EscF7)
11141{
11142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11143 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
11144 if (IEM_IS_MODRM_REG_MODE(bRm))
11145 {
11146 switch (IEM_GET_MODRM_REG_8(bRm))
11147 {
11148 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
11149 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
11150 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11151 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11152 case 4: if (bRm == 0xe0)
11153 return FNIEMOP_CALL(iemOp_fnstsw_ax);
11154 IEMOP_RAISE_INVALID_OPCODE_RET();
11155 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
11156 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
11157 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11159 }
11160 }
11161 else
11162 {
11163 switch (IEM_GET_MODRM_REG_8(bRm))
11164 {
11165 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
11166 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
11167 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
11168 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
11169 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
11170 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
11171 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
11172 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
11173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11174 }
11175 }
11176}
11177
11178
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement the count register (CX/ECX/RCX per the
 * effective address size) and take the 8-bit relative branch while the
 * count is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which count register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11227
11228
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement the count register (CX/ECX/RCX per the
 * effective address size) and take the 8-bit relative branch while the
 * count is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which count register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11277
11278
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement the count register (CX/ECX/RCX per the effective
 * address size) and take the 8-bit relative branch while it is non-zero.
 * Includes a logging-only shortcut for the 'loop $-2' busy-wait idiom.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Shortcut only when verbose logging is active and the jump targets the
       instruction itself ($-2): zero the count register and fall through. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11365
11366
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the 8-bit relative branch when the count
 * register (CX/ECX/RCX per the effective address size) is zero; no
 * register is modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note the inverted sense: non-zero means fall through. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11412
11413
/** Opcode 0xe4.
 * IN AL,Ib: read one byte from the immediate-specified I/O port into AL.
 * Deferred to iemCImpl_in; 0x80 in the last argument flags the
 * immediate-port form, OR'ed with the effective address mode. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}


/** Opcode 0xe5.
 * IN eAX,Ib: read a word or dword (per operand size) from the
 * immediate-specified I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}


/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate-specified I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}


/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX (per operand size) to the immediate-specified
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11454
11455
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  The immediate size follows the effective
 * operand size; in 64-bit mode a 32-bit immediate is sign-extended to 64
 * bits.  Deferred to the rel-call C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* The displacement is signed; cast before handing it on. */
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11486
11487
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  There is no 64-bit immediate form, so the
 * 64-bit case shares the sign-extended 32-bit displacement path.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:   /* fall through - rel32 displacement in both modes */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11519
11520
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode.
 * The offset is decoded before the selector, matching the instruction's
 * little-endian operand layout.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
11540
11541
/**
 * @opcode 0xeb
 *
 * JMP Jb: short jump with an 8-bit relative displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
11556
11557
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL.  Deferred to
 * the shared eAX/DX port-in C implementation. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}


/** Opcode 0xed.
 * IN eAX,DX: read a word or dword (per operand size) from the I/O port in
 * DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}


/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}


/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX (per operand size) to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11596
11597
/**
 * @opcode 0xf0
 *
 * LOCK prefix: record the prefix (unless the disregard-lock execution flag
 * is set) and continue decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11610
11611
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raise a \#DB through the generic software interrupt C
 * implementation (IEMINT_INT1 flavor).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
11626
11627
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: record the prefix (clearing any previous REPE) and
 * continue decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11645
11646
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: record the prefix (clearing any previous REPNE) and
 * continue decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11664
11665
/**
 * @opcode 0xf4
 *
 * HLT: halt the CPU.  Deferred to the C implementation; ends the current
 * translation block and may cause a VM exit.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
11675
11676
/**
 * @opcode 0xf5
 *
 * CMC: complement the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11689
11690
/**
 * Body for 'inc/dec/not/neg Eb'.
 *
 * Common decode+execute sequence for the byte-sized unary group
 * instructions.  The register form invokes a_fnNormalU8 directly on the
 * referenced general register; the memory form maps the destination byte
 * read/write and selects the plain worker (a_fnNormalU8) or the
 * interlocked one (a_fnLockedU8) depending on whether a LOCK prefix was
 * decoded.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnNormalU8    Assembly worker for the non-locked case.
 * @param   a_fnLockedU8    Assembly worker for the LOCK prefixed case.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,     0); \
        IEM_MC_ARG(uint32_t *,      pEFlags,    1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed: same sequence but with the interlocked worker. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
11749
11750
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register form and the non-LOCK'ed memory form, then deliberately
 * ends inside a still-open 'else' branch: it MUST be followed by
 * IEMOP_BODY_UNARY_Ev_LOCKED, which supplies the LOCK'ed memory variant and
 * closes the remaining braces.  Expects 'bRm' to be in scope.
 *
 * NOTE(review): unlike IEMOP_BODY_UNARY_Eb, the memory paths here do not call
 * IEMOP_HLP_DONE_DECODING() after the effective-address calculation — confirm
 * this is intentional.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Intentionally left open: IEMOP_BODY_UNARY_Ev_LOCKED continues here. */ \
            (void)0
11866
/**
 * Second half of the 'inc/dec/not/neg Ev' body: the LOCK prefixed memory
 * variant.  Must directly follow IEMOP_BODY_UNARY_Ev, whose open 'else'
 * branch and braces it completes.
 *
 * @param   a_fnLockedU16   Locked assembly worker for the 16-bit operand size.
 * @param   a_fnLockedU32   Locked assembly worker for the 32-bit operand size.
 * @param   a_fnLockedU64   Locked assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
11926
11927
/**
 * @opmaps grp3_f6
 * @opcode /0
 * TEST Eb,Ib - AND without writing the destination; AF is left undefined.
 * @todo also /1
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* Calc the effective address first (1 = size of the trailing immediate),
           then fetch the immediate that follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Mapped read-only: TEST never writes the destination operand. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11977
11978
/**
 * Common body for 'mul/imul/div/idiv Eb' - opcode 0xf6 /4, /5, /6 and /7.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly worker.  It operates on
 *                  AX by reference and returns zero on success, non-zero to
 *                  raise \#DE (see the IEM_MC_IF_LOCAL_IS_Z check below).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12029
12030
/**
 * Common body for 'mul/imul/div/idiv Ev' - opcode 0xf7 /4, /5, /6 and /7.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table of multiply/divide assembly workers, one per operand
 *                  size.  Each works on xAX/xDX by reference and returns zero
 *                  on success, non-zero to raise \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        /* NOTE(review): this call is repeated inside each case below; the
           duplication looks redundant - confirm and drop one of them. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero bits 63:32 - only on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero bits 63:32 - only on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12214
12215
/**
 * @opmaps grp3_f6
 * @opcode /2
 * NOT Eb - plain and LOCK'ed workers passed to the shared unary Eb body.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12225
12226
12227/**
12228 * @opmaps grp3_f6
12229 * @opcode /3
12230 */
12231FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12232{
12233 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12234 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12235}
12236
12237
12238/**
12239 * @opcode 0xf6
12240 */
12241FNIEMOP_DEF(iemOp_Grp3_Eb)
12242{
12243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12244 switch (IEM_GET_MODRM_REG_8(bRm))
12245 {
12246 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12247 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12248 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12249 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12250 case 4:
12251 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12252 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12253 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12254 case 5:
12255 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12256 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12257 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12258 case 6:
12259 IEMOP_MNEMONIC(div_Eb, "div Eb");
12260 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12261 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12262 case 7:
12263 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12264 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12265 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12266 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12267 }
12268}
12269
12270
/**
 * Opcode 0xf7 /0.
 * TEST Ev,Iv - AND without writing the destination; AF is left undefined.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                /* The 64-bit immediate is a sign-extended 32-bit value. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Calc the effective address first (2 = size of the trailing
                   immediate), then fetch the immediate itself. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Mapped read-only: TEST never writes the destination operand. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* The immediate is 4 bytes, sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12410
12411
/** Opcode 0xf7 /2.
 * NOT Ev - composed of the open-ended unary Ev body plus its LOCK'ed tail. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
12419
12420
/** Opcode 0xf7 /3.
 * NEG Ev - composed of the open-ended unary Ev body plus its LOCK'ed tail. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
12428
12429
12430/**
12431 * @opcode 0xf7
12432 */
12433FNIEMOP_DEF(iemOp_Grp3_Ev)
12434{
12435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12436 switch (IEM_GET_MODRM_REG_8(bRm))
12437 {
12438 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12439 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12440 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
12441 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
12442 case 4:
12443 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
12444 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12445 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
12446 case 5:
12447 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
12448 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12449 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
12450 case 6:
12451 IEMOP_MNEMONIC(div_Ev, "div Ev");
12452 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12453 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
12454 case 7:
12455 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
12456 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12457 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
12458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12459 }
12460}
12461
12462
/**
 * @opcode 0xf8
 * CLC - clear the carry flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix not allowed. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12475
12476
/**
 * @opcode 0xf9
 * STC - set the carry flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix not allowed. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12489
12490
/**
 * @opcode 0xfa
 * CLI - deferred to the C implementation; per the flags it modifies RFLAGS
 * and may cause a VM-exit.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix not allowed. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_cli);
}
12500
12501
/**
 * @opcode 0xfb
 * STI - deferred to the C implementation; per the flags it modifies RFLAGS
 * and may cause a VM-exit.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix not allowed. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
12508
12509
/**
 * @opcode 0xfc
 * CLD - clear the direction flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix not allowed. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12522
12523
/**
 * @opcode 0xfd
 * STD - set the direction flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix not allowed. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12536
12537
/**
 * @opmaps grp4
 * @opcode /0
 * INC Eb - plain and LOCK'ed workers passed to the shared unary Eb body.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
12547
12548
/**
 * @opmaps grp4
 * @opcode /1
 * DEC Eb - plain and LOCK'ed workers passed to the shared unary Eb body.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
12558
12559
/**
 * @opcode 0xfe
 * Group 4 dispatcher: only /0 (inc) and /1 (dec) are defined; everything
 * else raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
12576
/** Opcode 0xff /0.
 * INC Ev - composed of the open-ended unary Ev body plus its LOCK'ed tail. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
12584
12585
/** Opcode 0xff /1.
 * DEC Ev - composed of the open-ended unary Ev body plus its LOCK'ed tail. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
12593
12594
/**
 * Opcode 0xff /2.
 * CALL Ev - near indirect call; the target comes from a register or from
 * memory depending on the ModR/M mode.
 * @param   bRm   The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is loaded from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12679
/**
 * Common body for far call (0xff /3) and far jump (0xff /5) with a far
 * pointer operand loaded from memory (m16:16, m16:32 or m16:64).
 *
 * Register operands raise \#UD here (a far pointer does not fit in a single
 * general register), so only the memory ModR/M forms are decoded.
 *
 * The offset is fetched first, then the 16-bit selector at displacement
 * +2/+4/+8 depending on the effective operand size.
 *
 * @param   a_bRm       The ModR/M byte.
 * @param   a_fnCImpl   The C implementation doing the branching
 *                      (iemCImpl_callf or iemCImpl_FarJmp).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            /* Only reachable on Intel per the REX.W handling above. */ \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
12745
12746
/**
 * Opcode 0xff /3 - far call with the far pointer (selector:offset) read from
 * the memory operand; register forms \#UD (see IEMOP_BODY_GRP5_FAR_EP).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf); /* shared body, only the CImpl differs from jmpf. */
}
12756
12757
/**
 * Opcode 0xff /4 - near (intra-segment) jump with the target taken from a
 * register or a memory operand.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* 64-bit code: operand size defaults to 64-bit; per the helper's name,
       Intel ignores the operand size prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location.
           Note: effective address is calculated before DONE_DECODING since
           the ModR/M bytes (SIB/displacement) are part of decoding. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12842
12843
/**
 * Opcode 0xff /5 - far jump with the far pointer (selector:offset) read from
 * the memory operand; register forms \#UD (see IEMOP_BODY_GRP5_FAR_EP).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp); /* shared body, only the CImpl differs from callf. */
}
12853
12854
/**
 * Opcode 0xff /6 - push a word/dword/qword sized register or memory operand
 * onto the stack.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    /* In 64-bit code the push operand size defaults to 64-bit (helper name);
       hence no IEMMODE_32BIT encoding exists there. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12910
12911
12912/**
12913 * @opcode 0xff
12914 */
12915FNIEMOP_DEF(iemOp_Grp5)
12916{
12917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12918 switch (IEM_GET_MODRM_REG_8(bRm))
12919 {
12920 case 0:
12921 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
12922 case 1:
12923 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
12924 case 2:
12925 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
12926 case 3:
12927 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
12928 case 4:
12929 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
12930 case 5:
12931 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
12932 case 6:
12933 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
12934 case 7:
12935 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
12936 IEMOP_RAISE_INVALID_OPCODE_RET();
12937 }
12938 AssertFailedReturn(VERR_IEM_IPE_3);
12939}
12940
12941
12942
/**
 * The one byte opcode dispatch table, indexed by the opcode byte (0x00-0xff).
 *
 * Prefix bytes (segment overrides, operand/address size, lock, rep/repne)
 * and the 0x0f two-byte escape appear as regular entries; group opcodes
 * (0x80-0x83, 0xc0/0xc1, 0xc6/0xc7, 0xd0-0xd3, 0xf6/0xf7, 0xfe, 0xff)
 * dispatch further on the ModR/M reg field inside their handlers.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13010
13011
13012/** @} */
13013
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette