VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 98880

最後變更 在這個檔案從98880是 98880,由 vboxsync 提交於 2 年 前

VMM/IEM: More work on processing MC blocks. bugref:10369

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Author Date Id Revision
檔案大小: 396.6 KB
 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 98880 2023-03-09 01:23:15Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/* Instruction specification format - work in progress: */
60
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8: decode ModR/M and dispatch to the common byte binary-op helper with the ADD worker table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
82
83
/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64: common operand-size-generic binary-op helper, ADD workers. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
98
99
/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8: register-destination variant (no LOCK - destination is a register). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
111
112
/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64: register-destination variant. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
124
125
/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8: fixed accumulator form, byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
137
138
/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, imm16/32: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
153
154
/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES: invalid in 64-bit mode; common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
165
166
/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES: invalid in 64-bit mode; deferred to the C implementation (segment loads fault). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
178
179
/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8: AF is architecturally undefined after OR, hence the verification mask. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
197
198
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64. (Comment opener fixed to a Doxygen block so the @op tags
       are picked up like every sibling instruction's.) */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
218
219
/**
 * @opcode 0x0a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8: register-destination variant. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
234
235
/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64: register-destination variant. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
250
251
/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8: fixed accumulator form. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
266
267
/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
288
289
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode; flagged potentially dangerous for the disassembler. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
300
301
302/**
303 * @opcode 0x0f
304 * @opmnemonic EscTwo0f
305 * @openc two0f
306 * @opdisenum OP_2B_ESC
307 * @ophints harmless
308 * @opgroup og_escapes
309 */
310FNIEMOP_DEF(iemOp_2byteEscape)
311{
312#ifdef VBOX_STRICT
313 /* Sanity check the table the first time around. */
314 static bool s_fTested = false;
315 if (RT_LIKELY(s_fTested)) { /* likely */ }
316 else
317 {
318 s_fTested = true;
319 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
320 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
321 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
322 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
323 }
324#endif
325
326 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
327 {
328 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
329 IEMOP_HLP_MIN_286();
330 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
331 }
332 /* @opdone */
333
334 /*
335 * On the 8086 this is a POP CS instruction.
336 * For the time being we don't specify this this.
337 */
338 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
339 IEMOP_HLP_NO_64BIT();
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
342}
343
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8: add with carry-in (CF tested, see @opfltest). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
360
361
/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
378
379
/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8: register-destination variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
392
393
/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64: register-destination variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
406
407
/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8: fixed accumulator form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
420
421
/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
434
435
/**
 * @opcode 0x16
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
445
446
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: inhibits interrupts for one instruction; invalid in 64-bit mode.
       NOTE(review): the original doc block carried @opgroup og_gen_arith_bin plus
       sbb-style flag tags - a copy/paste from the adjacent SBB entries; retagged
       og_stack_sreg to match pop ES/DS (the code defers to iemCImpl_pop_Sreg just
       like those and touches no arithmetic flags here). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
460
461
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8: subtract with borrow (CF tested). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
473
474
/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
486
487
/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8: register-destination variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
499
500
/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64: register-destination variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
512
513
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8: fixed accumulator form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
525
526
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
538
539
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
550
551
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
563
564
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8: AF is architecturally undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
578
579
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
593
594
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8: register-destination variant. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
608
609
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64: register-destination variant. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
623
624
625/**
626 * @opcode 0x24
627 * @opgroup og_gen_arith_bin
628 * @opflmodify cf,pf,af,zf,sf,of
629 * @opflundef af
630 * @opflclear of,cf
631 */
632FNIEMOP_DEF(iemOp_and_Al_Ib)
633{
634 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
637}
638
639
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
653
654
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and continue decoding the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
673
674
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal-adjust AL after addition; invalid in 64-bit mode; OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
689
690
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
701
702
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
713
714
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8: register-destination variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
725
726
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64: register-destination variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
737
738
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8: fixed accumulator form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
749
750
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
761
762
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it and continue decoding the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
781
782
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal-adjust AL after subtraction; invalid in 64-bit mode; OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
797
798
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8: AF is architecturally undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
812
813
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
827
828
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8: register-destination variant. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
842
843
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64: register-destination variant. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
857
858
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8: fixed accumulator form. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
872
873
874/**
875 * @opcode 0x35
876 * @opgroup og_gen_arith_bin
877 * @opflmodify cf,pf,af,zf,sf,of
878 * @opflundef af
879 * @opflclear of,cf
880 */
881FNIEMOP_DEF(iemOp_xor_eAX_Iz)
882{
883 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
886}
887
888
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it and continue decoding the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
907
908
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII-adjust AL after addition; invalid in 64-bit mode; deferred to C impl. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
956
957
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8: subtract-and-discard; only flags are updated (CMP worker table). */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
966
967
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
976
977
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8: register-first operand order. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
986
987
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64: register-first operand order. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
996
997
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8: fixed accumulator form. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
1006
1007
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32: immediate width follows the effective operand size. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1016
1017
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and continue decoding the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1030
1031
1032/**
1033 * @opcode 0x3f
1034 * @opfltest af,cf
1035 * @opflmodify cf,pf,af,zf,sf,of
1036 * @opflundef pf,zf,sf,of
1037 * @opgroup og_gen_arith_dec
1038 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1039 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1040 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1041 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1042 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1043 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1045 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1046 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1047 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1048 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1049 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1050 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1051 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1052 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1053 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1058 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1059 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1060 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1061 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1062 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1063 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1064 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1065 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1066 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1067 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1068 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1069 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1070 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1071 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1072 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1073 */
1074FNIEMOP_DEF(iemOp_aas)
1075{
1076 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1077 IEMOP_HLP_NO_64BIT();
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1080
1081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1082}
1083
1084
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   The size-variant worker table for the unary operation
 *                  (e.g. \&g_iemAImpl_inc or \&g_iemAImpl_dec).
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit register writes clear bits 63:32 of the full register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1130
1131
1132/**
1133 * @opcode 0x40
1134 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.  0x40 sets no R/X/B/W bits, it
     * only marks the REX prefix as present.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eAX'. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1152
1153
1154/**
1155 * @opcode 0x41
1156 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is the REX.B prefix in 64-bit mode (register extension recorded
     * in uRexB for later ModR/M decoding).
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eCX'. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1175
1176
1177/**
1178 * @opcode 0x42
1179 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is the REX.X prefix in 64-bit mode (SIB index extension recorded
     * in uRexIndex).
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eDX'. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1198
1199
1200
1201/**
1202 * @opcode 0x43
1203 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is the REX.BX (REX.B + REX.X) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eBX'. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1223
1224
1225/**
1226 * @opcode 0x44
1227 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is the REX.R prefix in 64-bit mode (ModR/M reg-field extension
     * recorded in uRexReg).
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eSP'. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1246
1247
1248/**
1249 * @opcode 0x45
1250 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is the REX.RB (REX.R + REX.B) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eBP'. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1270
1271
1272/**
1273 * @opcode 0x46
1274 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is the REX.RX (REX.R + REX.X) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eSI'. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1294
1295
1296/**
1297 * @opcode 0x47
1298 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is the REX.RBX (REX.R + REX.B + REX.X) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'inc eDI'. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1319
1320
1321/**
1322 * @opcode 0x48
1323 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is the REX.W prefix in 64-bit mode; W changes the effective
     * operand size, hence the recalculation.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eAX'. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1342
1343
1344/**
1345 * @opcode 0x49
1346 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is the REX.BW (REX.B + REX.W) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eCX'. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1366
1367
1368/**
1369 * @opcode 0x4a
1370 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is the REX.XW (REX.X + REX.W) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eDX'. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1390
1391
1392/**
1393 * @opcode 0x4b
1394 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is the REX.BXW (REX.B + REX.X + REX.W) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eBX'. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1415
1416
1417/**
1418 * @opcode 0x4c
1419 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is the REX.RW (REX.R + REX.W) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eSP'. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1439
1440
1441/**
1442 * @opcode 0x4d
1443 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is the REX.RBW (REX.R + REX.B + REX.W) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eBP'. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1464
1465
1466/**
1467 * @opcode 0x4e
1468 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is the REX.RXW (REX.R + REX.X + REX.W) prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eSI'. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1489
1490
1491/**
1492 * @opcode 0x4f
1493 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is the REX.RBXW prefix (all four extension bits) in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain 'dec eDI'. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1515
1516
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The register index (X86_GREG_XXX); OR'ed with uRexB below
 *                  when in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode push defaults to a 64-bit operand; an operand-size
           prefix selects 16-bit, there is no 32-bit push. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1562
1563
1564/**
1565 * @opcode 0x50
1566 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Common push helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1572
1573
1574/**
1575 * @opcode 0x51
1576 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Common push helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1582
1583
1584/**
1585 * @opcode 0x52
1586 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Common push helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1592
1593
1594/**
1595 * @opcode 0x53
1596 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Common push helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1602
1603
1604/**
1605 * @opcode 0x54
1606 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* 8086 quirk: it pushes SP as already decremented by the push itself
       (SP - 2); later CPUs push the original value via the common helper. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1622
1623
1624/**
1625 * @opcode 0x55
1626 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Common push helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1632
1633
1634/**
1635 * @opcode 0x56
1636 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Common push helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1642
1643
1644/**
1645 * @opcode 0x57
1646 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Common push helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1652
1653
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The register index (X86_GREG_XXX); OR'ed with uRexB below
 *                  when in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode pop defaults to a 64-bit operand; an operand-size
           prefix selects 16-bit, there is no 32-bit pop. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1700
1701
1702/**
1703 * @opcode 0x58
1704 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Common pop helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1710
1711
1712/**
1713 * @opcode 0x59
1714 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Common pop helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1720
1721
1722/**
1723 * @opcode 0x5a
1724 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Common pop helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1730
1731
1732/**
1733 * @opcode 0x5b
1734 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Common pop helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1740
1741
1742/**
1743 * @opcode 0x5c
1744 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is pop r12 and can use the common helper; plain
           'pop rsp' needs the special handling below. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Pop into a local first, then store to SP, since the destination is the
       stack pointer itself. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1791
1792
1793/**
1794 * @opcode 0x5d
1795 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Common pop helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1801
1802
1803/**
1804 * @opcode 0x5e
1805 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Common pop helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1811
1812
1813/**
1814 * @opcode 0x5f
1815 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Common pop helper handles operand size and REX.B adjustment. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1821
1822
1823/**
1824 * @opcode 0x60
1825 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();    /* introduced with the 80186 */
    IEMOP_HLP_NO_64BIT();   /* invalid in 64-bit mode */
    /* Only 16- and 32-bit operand sizes are possible outside 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1836
1837
1838/**
1839 * @opcode 0x61
1840 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode opcode 0x61 is POPA; in 64-bit mode it is the
       (unsupported) MVEX prefix byte. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1857
1858
1859/**
1860 * @opcode 0x62
1861 * @opmnemonic bound
1862 * @op1 Gv_RO
1863 * @op2 Ma
1864 * @opmincpu 80186
1865 * @ophints harmless invalid_64
1866 * @optest op1=0 op2=0 ->
1867 * @optest op1=1 op2=0 -> value.xcpt=5
1868 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1869 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1870 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1871 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1872 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1873 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1874 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1875 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1876 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1877 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1878 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1879 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1880 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1881 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1882 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1883 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1884 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1885 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1886 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1887 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1888 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1889 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1890 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1891 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1892 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1893 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1894 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1895 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1896 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1897 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1898 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1899 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1900 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1901 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1902 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1903 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1904 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1905 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1906 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1907 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1908 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1909 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1910 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m16, upper bound at m16+2. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m32, upper bound at m32+4. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD==3 falls through here: treated as the EVEX prefix when the
           guest CPU exposes AVX-512, otherwise #UD. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    IEMOP_MNEMONIC(evex, "evex");
    /* Consume the remaining two EVEX payload bytes before giving up. */
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1998
1999
2000/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();                /* 80286+ instruction */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* protected mode only */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write on the destination word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2047
2048
2049/**
2050 * @opcode 0x63
2051 *
2052 * @note This is a weird one. It works like a regular move instruction if
2053 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2054 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend 32 -> 64) is handled here. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Non-REX.W behavior (plain move per the AMD docs, see @note above)
           is not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2096
2097
2098/**
2099 * @opcode 0x64
2100 * @opmnemonic segfs
2101 * @opmincpu 80386
2102 * @opgroup og_prefixes
2103 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2115
2116
2117/**
2118 * @opcode 0x65
2119 * @opmnemonic seggs
2120 * @opmincpu 80386
2121 * @opgroup og_prefixes
 *
 * GS segment-override prefix: records the prefix bit, makes GS the
 * effective segment, then fetches and dispatches the next opcode byte.
2122 */
2123FNIEMOP_DEF(iemOp_seg_GS)
2124{
2125 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2126 IEMOP_HLP_MIN_386();
2127
2128 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2129 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2130
 /* Continue decoding: the prefix applies to the instruction that follows. */
2131 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2132 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2133}
2134
2135
2136/**
2137 * @opcode 0x66
2138 * @opmnemonic opsize
2139 * @openc prefix
2140 * @opmincpu 80386
2141 * @ophints harmless
2142 * @opgroup og_prefixes
 *
 * Operand-size override prefix: records the prefix, recalculates the
 * effective operand size, then dispatches the next opcode byte.
2143 */
2144FNIEMOP_DEF(iemOp_op_size)
2145{
2146 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2147 IEMOP_HLP_MIN_386();
2148
2149 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2150 iemRecalEffOpSize(pVCpu);
2151
2152 /* For the 4 entry opcode tables, the operand prefix doesn't count
2153 when REPZ or REPNZ are present. */
2154 if (pVCpu->iem.s.idxPrefix == 0)
2155 pVCpu->iem.s.idxPrefix = 1;
2156
2157 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2158 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2159}
2160
2161
2162/**
2163 * @opcode 0x67
2164 * @opmnemonic addrsize
2165 * @openc prefix
2166 * @opmincpu 80386
2167 * @ophints harmless
2168 * @opgroup og_prefixes
 *
 * Address-size override prefix: toggles the effective address mode
 * relative to the default (16<->32; 64 -> 32), then dispatches the
 * next opcode byte.
2169 */
2170FNIEMOP_DEF(iemOp_addr_size)
2171{
2172 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2173 IEMOP_HLP_MIN_386();
2174
2175 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2176 switch (pVCpu->iem.s.enmDefAddrMode)
2177 {
2178 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2179 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2180 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2181 default: AssertFailed();
2182 }
2183
2184 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2185 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2186}
2187
2188
2189/**
2190 * @opcode 0x68
 *
 * push Iz: push a 16/32-bit immediate (sign-extended 32-bit immediate
 * in 64-bit mode) according to the effective operand size.
2191 */
2192FNIEMOP_DEF(iemOp_push_Iz)
2193{
2194 IEMOP_MNEMONIC(push_Iz, "push Iz");
2195 IEMOP_HLP_MIN_186();
2196 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2197 switch (pVCpu->iem.s.enmEffOpSize)
2198 {
2199 case IEMMODE_16BIT:
2200 {
2201 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2203 IEM_MC_BEGIN(0,0);
2204 IEM_MC_PUSH_U16(u16Imm);
2205 IEM_MC_ADVANCE_RIP_AND_FINISH();
2206 IEM_MC_END();
2207 break;
2208 }
2209
2210 case IEMMODE_32BIT:
2211 {
2212 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 IEM_MC_BEGIN(0,0);
2215 IEM_MC_PUSH_U32(u32Imm);
2216 IEM_MC_ADVANCE_RIP_AND_FINISH();
2217 IEM_MC_END();
2218 break;
2219 }
2220
2221 case IEMMODE_64BIT:
2222 {
 /* 64-bit mode still encodes a 32-bit immediate; sign-extend it. */
2223 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2225 IEM_MC_BEGIN(0,0);
2226 IEM_MC_PUSH_U64(u64Imm);
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 break;
2230 }
2231
2232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2233 }
2234}
2235
2236
2237/**
2238 * @opcode 0x69
 *
 * imul Gv,Ev,Iz: three-operand signed multiply — Gv = Ev * Iz — for
 * 16/32/64-bit operand sizes, with register and memory source forms.
 * The immediate is full operand-sized (sign-extended 32-bit in 64-bit
 * mode).  SF/ZF/AF/PF are undefined after this instruction.
2239 */
2240FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2241{
2242 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2243 IEMOP_HLP_MIN_186();
2244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2246
2247 switch (pVCpu->iem.s.enmEffOpSize)
2248 {
2249 case IEMMODE_16BIT:
2250 {
2251 if (IEM_IS_MODRM_REG_MODE(bRm))
2252 {
2253 /* register operand */
2254 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2256
2257 IEM_MC_BEGIN(3, 1);
2258 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2259 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2261 IEM_MC_LOCAL(uint16_t, u16Tmp);
2262
 /* Multiply in a local and store to Gv afterwards (Ev is source only). */
2263 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2264 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2265 IEM_MC_REF_EFLAGS(pEFlags);
2266 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2267 pu16Dst, u16Src, pEFlags);
2268 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2269
2270 IEM_MC_ADVANCE_RIP_AND_FINISH();
2271 IEM_MC_END();
2272 }
2273 else
2274 {
2275 /* memory operand */
2276 IEM_MC_BEGIN(3, 2);
2277 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2278 IEM_MC_ARG(uint16_t, u16Src, 1);
2279 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2280 IEM_MC_LOCAL(uint16_t, u16Tmp);
2281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2282
 /* Effective address first (2 = immediate bytes still to come), then the immediate. */
2283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2284 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2285 IEM_MC_ASSIGN(u16Src, u16Imm);
2286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2287 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2288 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2289 IEM_MC_REF_EFLAGS(pEFlags);
2290 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2291 pu16Dst, u16Src, pEFlags);
2292 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2293
2294 IEM_MC_ADVANCE_RIP_AND_FINISH();
2295 IEM_MC_END();
2296 }
2297 break;
2298 }
2299
2300 case IEMMODE_32BIT:
2301 {
2302 if (IEM_IS_MODRM_REG_MODE(bRm))
2303 {
2304 /* register operand */
2305 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2307
2308 IEM_MC_BEGIN(3, 1);
2309 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2310 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2311 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2312 IEM_MC_LOCAL(uint32_t, u32Tmp);
2313
2314 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2315 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2316 IEM_MC_REF_EFLAGS(pEFlags);
2317 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2318 pu32Dst, u32Src, pEFlags);
2319 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2320
2321 IEM_MC_ADVANCE_RIP_AND_FINISH();
2322 IEM_MC_END();
2323 }
2324 else
2325 {
2326 /* memory operand */
2327 IEM_MC_BEGIN(3, 2);
2328 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2329 IEM_MC_ARG(uint32_t, u32Src, 1);
2330 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2331 IEM_MC_LOCAL(uint32_t, u32Tmp);
2332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2333
 /* 4 immediate bytes still to come after the ModR/M operand. */
2334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2335 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2336 IEM_MC_ASSIGN(u32Src, u32Imm);
2337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2338 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2339 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2340 IEM_MC_REF_EFLAGS(pEFlags);
2341 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2342 pu32Dst, u32Src, pEFlags);
2343 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2344
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348 break;
2349 }
2350
2351 case IEMMODE_64BIT:
2352 {
2353 if (IEM_IS_MODRM_REG_MODE(bRm))
2354 {
2355 /* register operand */
 /* The immediate is 32 bits, sign-extended to 64. */
2356 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2358
2359 IEM_MC_BEGIN(3, 1);
2360 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2361 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2362 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2363 IEM_MC_LOCAL(uint64_t, u64Tmp);
2364
2365 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2366 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2367 IEM_MC_REF_EFLAGS(pEFlags);
2368 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2369 pu64Dst, u64Src, pEFlags);
2370 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2371
2372 IEM_MC_ADVANCE_RIP_AND_FINISH();
2373 IEM_MC_END();
2374 }
2375 else
2376 {
2377 /* memory operand */
2378 IEM_MC_BEGIN(3, 2);
2379 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2380 IEM_MC_ARG(uint64_t, u64Src, 1);
2381 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2382 IEM_MC_LOCAL(uint64_t, u64Tmp);
2383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2384
 /* 4 immediate bytes still to come (32-bit imm, sign-extended to 64). */
2385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2386 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2387 IEM_MC_ASSIGN(u64Src, u64Imm);
2388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2389 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2390 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2391 IEM_MC_REF_EFLAGS(pEFlags);
2392 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2393 pu64Dst, u64Src, pEFlags);
2394 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2395
2396 IEM_MC_ADVANCE_RIP_AND_FINISH();
2397 IEM_MC_END();
2398 }
2399 break;
2400 }
2401
2402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2403 }
2404}
2405
2406
2407/**
2408 * @opcode 0x6a
 *
 * push Ib: push a sign-extended 8-bit immediate, widened to the
 * effective operand size (16/32/64).
2409 */
2410FNIEMOP_DEF(iemOp_push_Ib)
2411{
2412 IEMOP_MNEMONIC(push_Ib, "push Ib");
2413 IEMOP_HLP_MIN_186();
2414 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2416 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2417
2418 switch (pVCpu->iem.s.enmEffOpSize)
2419 {
2420 case IEMMODE_16BIT:
2421 IEM_MC_BEGIN(0,0);
2422 IEM_MC_PUSH_U16(i8Imm);
2423 IEM_MC_ADVANCE_RIP_AND_FINISH();
2424 IEM_MC_END();
2425 break;
2426 case IEMMODE_32BIT:
2427 IEM_MC_BEGIN(0,0);
2428 IEM_MC_PUSH_U32(i8Imm);
2429 IEM_MC_ADVANCE_RIP_AND_FINISH();
2430 IEM_MC_END();
2431 break;
2432 case IEMMODE_64BIT:
2433 IEM_MC_BEGIN(0,0);
2434 IEM_MC_PUSH_U64(i8Imm);
2435 IEM_MC_ADVANCE_RIP_AND_FINISH();
2436 IEM_MC_END();
2437 break;
2438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2439 }
2440}
2441
2442
2443/**
2444 * @opcode 0x6b
 *
 * imul Gv,Ev,Ib: three-operand signed multiply — Gv = Ev * Ib — with
 * the 8-bit immediate sign-extended to the effective operand size.
 * SF/ZF/AF/PF are undefined after this instruction.
2445 */
2446FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2447{
2448 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
2449 IEMOP_HLP_MIN_186();
2450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2451 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2452
2453 switch (pVCpu->iem.s.enmEffOpSize)
2454 {
2455 case IEMMODE_16BIT:
2456 if (IEM_IS_MODRM_REG_MODE(bRm))
2457 {
2458 /* register operand */
2459 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2461
2462 IEM_MC_BEGIN(3, 1);
2463 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 /* The (int8_t) cast performs the sign extension of Ib. */
2464 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2465 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2466 IEM_MC_LOCAL(uint16_t, u16Tmp);
2467
2468 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2469 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2470 IEM_MC_REF_EFLAGS(pEFlags);
2471 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2472 pu16Dst, u16Src, pEFlags);
2473 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2474
2475 IEM_MC_ADVANCE_RIP_AND_FINISH();
2476 IEM_MC_END();
2477 }
2478 else
2479 {
2480 /* memory operand */
2481 IEM_MC_BEGIN(3, 2);
2482 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2483 IEM_MC_ARG(uint16_t, u16Src, 1);
2484 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2485 IEM_MC_LOCAL(uint16_t, u16Tmp);
2486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2487
 /* Effective address first (1 = immediate byte still to come), then the immediate. */
2488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2489 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2490 IEM_MC_ASSIGN(u16Src, u16Imm);
2491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2492 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2493 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2494 IEM_MC_REF_EFLAGS(pEFlags);
2495 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2496 pu16Dst, u16Src, pEFlags);
2497 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2498
2499 IEM_MC_ADVANCE_RIP_AND_FINISH();
2500 IEM_MC_END();
2501 }
2502 break;
2503
2504 case IEMMODE_32BIT:
2505 if (IEM_IS_MODRM_REG_MODE(bRm))
2506 {
2507 /* register operand */
2508 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2510
2511 IEM_MC_BEGIN(3, 1);
2512 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2513 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2514 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2515 IEM_MC_LOCAL(uint32_t, u32Tmp);
2516
2517 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2518 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2519 IEM_MC_REF_EFLAGS(pEFlags);
2520 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2521 pu32Dst, u32Src, pEFlags);
2522 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2523
2524 IEM_MC_ADVANCE_RIP_AND_FINISH();
2525 IEM_MC_END();
2526 }
2527 else
2528 {
2529 /* memory operand */
2530 IEM_MC_BEGIN(3, 2);
2531 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2532 IEM_MC_ARG(uint32_t, u32Src, 1);
2533 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2534 IEM_MC_LOCAL(uint32_t, u32Tmp);
2535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2536
2537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2538 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2539 IEM_MC_ASSIGN(u32Src, u32Imm);
2540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2541 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2542 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2543 IEM_MC_REF_EFLAGS(pEFlags);
2544 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2545 pu32Dst, u32Src, pEFlags);
2546 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2547
2548 IEM_MC_ADVANCE_RIP_AND_FINISH();
2549 IEM_MC_END();
2550 }
2551 break;
2552
2553 case IEMMODE_64BIT:
2554 if (IEM_IS_MODRM_REG_MODE(bRm))
2555 {
2556 /* register operand */
2557 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2559
2560 IEM_MC_BEGIN(3, 1);
2561 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2562 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2563 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2564 IEM_MC_LOCAL(uint64_t, u64Tmp);
2565
2566 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2567 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2568 IEM_MC_REF_EFLAGS(pEFlags);
2569 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2570 pu64Dst, u64Src, pEFlags);
2571 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2572
2573 IEM_MC_ADVANCE_RIP_AND_FINISH();
2574 IEM_MC_END();
2575 }
2576 else
2577 {
2578 /* memory operand */
2579 IEM_MC_BEGIN(3, 2);
2580 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2581 IEM_MC_ARG(uint64_t, u64Src, 1);
2582 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2583 IEM_MC_LOCAL(uint64_t, u64Tmp);
2584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2585
2586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2587 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2588 IEM_MC_ASSIGN(u64Src, u64Imm);
2589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2590 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2591 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2592 IEM_MC_REF_EFLAGS(pEFlags);
2593 IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2594 pu64Dst, u64Src, pEFlags);
2595 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2596
2597 IEM_MC_ADVANCE_RIP_AND_FINISH();
2598 IEM_MC_END();
2599 }
2600 break;
2601
2602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2603 }
2604}
2605
2606
2607/**
2608 * @opcode 0x6c
 *
 * ins/rep ins Yb,DX: byte string input from port DX, deferred to the
 * C implementations, selected by REP prefix and effective address mode.
2609 */
2610FNIEMOP_DEF(iemOp_insb_Yb_DX)
2611{
2612 IEMOP_HLP_MIN_186();
2613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2614 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2615 {
2616 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2617 switch (pVCpu->iem.s.enmEffAddrMode)
2618 {
 /* NOTE(review): the trailing 'false' looks like an "I/O checked" flag — confirm against iemCImpl_rep_ins_op8_addr16. */
2619 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2620 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2621 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2623 }
2624 }
2625 else
2626 {
2627 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2628 switch (pVCpu->iem.s.enmEffAddrMode)
2629 {
2630 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2631 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2632 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2634 }
2635 }
2636}
2637
2638
2639/**
2640 * @opcode 0x6d
 *
 * ins/rep ins Yv,DX: word/dword string input from port DX, deferred to
 * the C implementations selected by REP prefix, effective operand size
 * and effective address mode.  Note: a 64-bit operand size is treated
 * as 32-bit (op32 workers) — there is no 64-bit INS.
2641 */
2642FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2643{
2644 IEMOP_HLP_MIN_186();
2645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2646 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2647 {
2648 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2649 switch (pVCpu->iem.s.enmEffOpSize)
2650 {
2651 case IEMMODE_16BIT:
2652 switch (pVCpu->iem.s.enmEffAddrMode)
2653 {
2654 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2655 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2656 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2658 }
2659 break;
2660 case IEMMODE_64BIT:
2661 case IEMMODE_32BIT:
2662 switch (pVCpu->iem.s.enmEffAddrMode)
2663 {
2664 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2665 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2666 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2667 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2668 }
2669 break;
2670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2671 }
2672 }
2673 else
2674 {
2675 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2676 switch (pVCpu->iem.s.enmEffOpSize)
2677 {
2678 case IEMMODE_16BIT:
2679 switch (pVCpu->iem.s.enmEffAddrMode)
2680 {
2681 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2682 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2683 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2685 }
2686 break;
2687 case IEMMODE_64BIT:
2688 case IEMMODE_32BIT:
2689 switch (pVCpu->iem.s.enmEffAddrMode)
2690 {
2691 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2692 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2693 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2695 }
2696 break;
2697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2698 }
2699 }
2700}
2701
2702
2703/**
2704 * @opcode 0x6e
 *
 * outs/rep outs DX,Yb: byte string output to port DX, deferred to the
 * C implementations selected by REP prefix and effective address mode.
 * The effective segment is passed along since OUTS honours overrides.
2705 */
2706FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2707{
2708 IEMOP_HLP_MIN_186();
2709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2710 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2711 {
2712 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2713 switch (pVCpu->iem.s.enmEffAddrMode)
2714 {
2715 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2716 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2717 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2719 }
2720 }
2721 else
2722 {
2723 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2724 switch (pVCpu->iem.s.enmEffAddrMode)
2725 {
2726 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2727 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2728 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2729 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2730 }
2731 }
2732}
2733
2734
2735/**
2736 * @opcode 0x6f
 *
 * outs/rep outs DX,Yv: word/dword string output to port DX, deferred
 * to the C implementations selected by REP prefix, effective operand
 * size and effective address mode.  A 64-bit operand size is treated
 * as 32-bit (op32 workers) — there is no 64-bit OUTS.
2737 */
2738FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2739{
2740 IEMOP_HLP_MIN_186();
2741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2742 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2743 {
2744 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2745 switch (pVCpu->iem.s.enmEffOpSize)
2746 {
2747 case IEMMODE_16BIT:
2748 switch (pVCpu->iem.s.enmEffAddrMode)
2749 {
2750 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2751 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2752 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2754 }
2755 break;
2756 case IEMMODE_64BIT:
2757 case IEMMODE_32BIT:
2758 switch (pVCpu->iem.s.enmEffAddrMode)
2759 {
2760 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2761 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2762 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2764 }
2765 break;
2766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2767 }
2768 }
2769 else
2770 {
2771 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2772 switch (pVCpu->iem.s.enmEffOpSize)
2773 {
2774 case IEMMODE_16BIT:
2775 switch (pVCpu->iem.s.enmEffAddrMode)
2776 {
2777 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2778 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2779 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2781 }
2782 break;
2783 case IEMMODE_64BIT:
2784 case IEMMODE_32BIT:
2785 switch (pVCpu->iem.s.enmEffAddrMode)
2786 {
2787 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2788 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2789 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2791 }
2792 break;
2793 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2794 }
2795 }
2796}
2797
2798
2799/**
2800 * @opcode 0x70
 *
 * jo Jb: jump short (rel8) if the overflow flag (OF) is set.
2801 */
2802FNIEMOP_DEF(iemOp_jo_Jb)
2803{
2804 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2805 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2808
2809 IEM_MC_BEGIN(0, 0);
2810 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2811 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2812 } IEM_MC_ELSE() {
2813 IEM_MC_ADVANCE_RIP_AND_FINISH();
2814 } IEM_MC_ENDIF();
2815 IEM_MC_END();
2816}
2817
2818
2819/**
2820 * @opcode 0x71
 *
 * jno Jb: jump short (rel8) if the overflow flag (OF) is clear.
2821 */
2822FNIEMOP_DEF(iemOp_jno_Jb)
2823{
2824 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2825 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2827 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2828
2829 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
2830 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2831 IEM_MC_ADVANCE_RIP_AND_FINISH();
2832 } IEM_MC_ELSE() {
2833 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2834 } IEM_MC_ENDIF();
2835 IEM_MC_END();
2836}
2837
2838/**
2839 * @opcode 0x72
 *
 * jc/jb/jnae Jb: jump short (rel8) if the carry flag (CF) is set.
2840 */
2841FNIEMOP_DEF(iemOp_jc_Jb)
2842{
2843 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2844 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2847
2848 IEM_MC_BEGIN(0, 0);
2849 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2850 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2851 } IEM_MC_ELSE() {
2852 IEM_MC_ADVANCE_RIP_AND_FINISH();
2853 } IEM_MC_ENDIF();
2854 IEM_MC_END();
2855}
2856
2857
2858/**
2859 * @opcode 0x73
 *
 * jnc/jnb/jae Jb: jump short (rel8) if the carry flag (CF) is clear.
2860 */
2861FNIEMOP_DEF(iemOp_jnc_Jb)
2862{
2863 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2864 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2866 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2867
2868 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
2869 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2870 IEM_MC_ADVANCE_RIP_AND_FINISH();
2871 } IEM_MC_ELSE() {
2872 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2873 } IEM_MC_ENDIF();
2874 IEM_MC_END();
2875}
2876
2877
2878/**
2879 * @opcode 0x74
 *
 * je/jz Jb: jump short (rel8) if the zero flag (ZF) is set.
2880 */
2881FNIEMOP_DEF(iemOp_je_Jb)
2882{
2883 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2884 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2886 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2887
2888 IEM_MC_BEGIN(0, 0);
2889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2890 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2891 } IEM_MC_ELSE() {
2892 IEM_MC_ADVANCE_RIP_AND_FINISH();
2893 } IEM_MC_ENDIF();
2894 IEM_MC_END();
2895}
2896
2897
2898/**
2899 * @opcode 0x75
 *
 * jne/jnz Jb: jump short (rel8) if the zero flag (ZF) is clear.
2900 */
2901FNIEMOP_DEF(iemOp_jne_Jb)
2902{
2903 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2904 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2907
2908 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
2909 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2910 IEM_MC_ADVANCE_RIP_AND_FINISH();
2911 } IEM_MC_ELSE() {
2912 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2913 } IEM_MC_ENDIF();
2914 IEM_MC_END();
2915}
2916
2917
2918/**
2919 * @opcode 0x76
 *
 * jbe/jna Jb: jump short (rel8) if CF or ZF is set (unsigned below-or-equal).
2920 */
2921FNIEMOP_DEF(iemOp_jbe_Jb)
2922{
2923 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2924 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2926 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2927
2928 IEM_MC_BEGIN(0, 0);
2929 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2930 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2931 } IEM_MC_ELSE() {
2932 IEM_MC_ADVANCE_RIP_AND_FINISH();
2933 } IEM_MC_ENDIF();
2934 IEM_MC_END();
2935}
2936
2937
2938/**
2939 * @opcode 0x77
 *
 * ja/jnbe Jb: jump short (rel8) if both CF and ZF are clear (unsigned above).
2940 */
2941FNIEMOP_DEF(iemOp_jnbe_Jb)
2942{
2943 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2944 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2946 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2947
2948 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
2949 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2950 IEM_MC_ADVANCE_RIP_AND_FINISH();
2951 } IEM_MC_ELSE() {
2952 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2953 } IEM_MC_ENDIF();
2954 IEM_MC_END();
2955}
2956
2957
2958/**
2959 * @opcode 0x78
 *
 * js Jb: jump short (rel8) if the sign flag (SF) is set.
2960 */
2961FNIEMOP_DEF(iemOp_js_Jb)
2962{
2963 IEMOP_MNEMONIC(js_Jb, "js Jb");
2964 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2966 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2967
2968 IEM_MC_BEGIN(0, 0);
2969 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2970 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2971 } IEM_MC_ELSE() {
2972 IEM_MC_ADVANCE_RIP_AND_FINISH();
2973 } IEM_MC_ENDIF();
2974 IEM_MC_END();
2975}
2976
2977
2978/**
2979 * @opcode 0x79
 *
 * jns Jb: jump short (rel8) if the sign flag (SF) is clear.
2980 */
2981FNIEMOP_DEF(iemOp_jns_Jb)
2982{
2983 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2984 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2986 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2987
2988 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
2989 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2990 IEM_MC_ADVANCE_RIP_AND_FINISH();
2991 } IEM_MC_ELSE() {
2992 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2993 } IEM_MC_ENDIF();
2994 IEM_MC_END();
2995}
2996
2997
2998/**
2999 * @opcode 0x7a
 *
 * jp/jpe Jb: jump short (rel8) if the parity flag (PF) is set.
3000 */
3001FNIEMOP_DEF(iemOp_jp_Jb)
3002{
3003 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3004 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3006 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3007
3008 IEM_MC_BEGIN(0, 0);
3009 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3010 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3011 } IEM_MC_ELSE() {
3012 IEM_MC_ADVANCE_RIP_AND_FINISH();
3013 } IEM_MC_ENDIF();
3014 IEM_MC_END();
3015}
3016
3017
3018/**
3019 * @opcode 0x7b
 *
 * jnp/jpo Jb: jump short (rel8) if the parity flag (PF) is clear.
3020 */
3021FNIEMOP_DEF(iemOp_jnp_Jb)
3022{
3023 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3024 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3027
3028 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
3029 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3030 IEM_MC_ADVANCE_RIP_AND_FINISH();
3031 } IEM_MC_ELSE() {
3032 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3033 } IEM_MC_ENDIF();
3034 IEM_MC_END();
3035}
3036
3037
3038/**
3039 * @opcode 0x7c
 *
 * jl/jnge Jb: jump short (rel8) if SF != OF (signed less).
3040 */
3041FNIEMOP_DEF(iemOp_jl_Jb)
3042{
3043 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3044 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3046 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3047
3048 IEM_MC_BEGIN(0, 0);
3049 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3050 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3051 } IEM_MC_ELSE() {
3052 IEM_MC_ADVANCE_RIP_AND_FINISH();
3053 } IEM_MC_ENDIF();
3054 IEM_MC_END();
3055}
3056
3057
3058/**
3059 * @opcode 0x7d
 *
 * jnl/jge Jb: jump short (rel8) if SF == OF (signed greater-or-equal).
3060 */
3061FNIEMOP_DEF(iemOp_jnl_Jb)
3062{
3063 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3064 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3066 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3067
3068 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
3069 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3070 IEM_MC_ADVANCE_RIP_AND_FINISH();
3071 } IEM_MC_ELSE() {
3072 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3073 } IEM_MC_ENDIF();
3074 IEM_MC_END();
3075}
3076
3077
3078/**
3079 * @opcode 0x7e
 *
 * jle/jng Jb: jump short (rel8) if ZF is set or SF != OF (signed less-or-equal).
3080 */
3081FNIEMOP_DEF(iemOp_jle_Jb)
3082{
3083 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3084 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3086 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3087
3088 IEM_MC_BEGIN(0, 0);
3089 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3090 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3091 } IEM_MC_ELSE() {
3092 IEM_MC_ADVANCE_RIP_AND_FINISH();
3093 } IEM_MC_ENDIF();
3094 IEM_MC_END();
3095}
3096
3097
3098/**
3099 * @opcode 0x7f
 *
 * jg/jnle Jb: jump short (rel8) if ZF is clear and SF == OF (signed greater).
3100 */
3101FNIEMOP_DEF(iemOp_jnle_Jb)
3102{
3103 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3104 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3106 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3107
3108 IEM_MC_BEGIN(0, 0);
 /* Inverted test: the taken branch is in the ELSE arm. */
3109 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3110 IEM_MC_ADVANCE_RIP_AND_FINISH();
3111 } IEM_MC_ELSE() {
3112 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3113 } IEM_MC_ENDIF();
3114 IEM_MC_END();
3115}
3116
3117
3118/**
3119 * @opcode 0x80
 *
 * Group 1, Eb,Ib: add/or/adc/sbb/and/sub/xor/cmp on a byte register or
 * memory operand with an 8-bit immediate.  The ModR/M reg field selects
 * the operation via the g_apIemImplGrp1 dispatch table; CMP (reg 7) has
 * no locked worker and is decoded as read-only.
3120 */
3121FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3122{
3123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3124 switch (IEM_GET_MODRM_REG_8(bRm))
3125 {
3126 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
3127 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
3128 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
3129 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
3130 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
3131 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
3132 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
3133 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
3134 }
3135 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3136
3137 if (IEM_IS_MODRM_REG_MODE(bRm))
3138 {
3139 /* register target */
3140 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3142 IEM_MC_BEGIN(3, 0);
3143 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3144 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3145 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3146
3147 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3148 IEM_MC_REF_EFLAGS(pEFlags);
3149 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3150
3151 IEM_MC_ADVANCE_RIP_AND_FINISH();
3152 IEM_MC_END();
3153 }
3154 else
3155 {
3156 /* memory target */
 /* CMP is the only member without a locked worker; map it read-only. */
3157 uint32_t fAccess;
3158 if (pImpl->pfnLockedU8)
3159 fAccess = IEM_ACCESS_DATA_RW;
3160 else /* CMP */
3161 fAccess = IEM_ACCESS_DATA_R;
3162 IEM_MC_BEGIN(3, 2);
3163 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3164 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3166
 /* Effective address first (1 = immediate byte still to come), then the immediate. */
3167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3168 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3169 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
 /* LOCK is only valid for the read-modify-write members, not CMP. */
3170 if (pImpl->pfnLockedU8)
3171 IEMOP_HLP_DONE_DECODING();
3172 else
3173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3174
3175 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3176 IEM_MC_FETCH_EFLAGS(EFlags);
3177 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3178 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3179 else
3180 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
3181
3182 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
3183 IEM_MC_COMMIT_EFLAGS(EFlags);
3184 IEM_MC_ADVANCE_RIP_AND_FINISH();
3185 IEM_MC_END();
3186 }
3187}
3188
3189
3190/**
3191 * @opcode 0x81
 *
 * Group 1 with word/dword/qword destination and full-sized immediate:
 * add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.  One case per effective operand
 * size; in 64-bit mode the immediate is a sign-extended 32-bit value.
3192 */
3193FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3194{
3195    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3196    switch (IEM_GET_MODRM_REG_8(bRm))
3197    {
3198        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3199        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3200        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3201        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3202        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3203        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3204        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3205        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3206    }
3207    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3208
3209    switch (pVCpu->iem.s.enmEffOpSize)
3210    {
3211        case IEMMODE_16BIT:
3212        {
3213            if (IEM_IS_MODRM_REG_MODE(bRm))
3214            {
3215                /* register target */
3216                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3217                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3218                IEM_MC_BEGIN(3, 0);
3219                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3220                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3221                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3222
3223                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3224                IEM_MC_REF_EFLAGS(pEFlags);
3225                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3226
3227                IEM_MC_ADVANCE_RIP_AND_FINISH();
3228                IEM_MC_END();
3229            }
3230            else
3231            {
3232                /* memory target */
                /* CMP has no locked worker and only reads the destination. */
3233                uint32_t fAccess;
3234                if (pImpl->pfnLockedU16)
3235                    fAccess = IEM_ACCESS_DATA_RW;
3236                else /* CMP */
3237                    fAccess = IEM_ACCESS_DATA_R;
3238                IEM_MC_BEGIN(3, 2);
3239                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3240                IEM_MC_ARG(uint16_t, u16Src, 1);
3241                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3242                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3243
                /* Effective address first (displacement precedes immediate);
                   '2' = number of immediate bytes still to be fetched. */
3244                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3245                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3246                IEM_MC_ASSIGN(u16Src, u16Imm);
3247                if (pImpl->pfnLockedU16)
3248                    IEMOP_HLP_DONE_DECODING();
3249                else
3250                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3251                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3252                IEM_MC_FETCH_EFLAGS(EFlags);
3253                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3254                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3255                else
3256                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3257
3258                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3259                IEM_MC_COMMIT_EFLAGS(EFlags);
3260                IEM_MC_ADVANCE_RIP_AND_FINISH();
3261                IEM_MC_END();
3262            }
3263            break;
3264        }
3265
3266        case IEMMODE_32BIT:
3267        {
3268            if (IEM_IS_MODRM_REG_MODE(bRm))
3269            {
3270                /* register target */
3271                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3272                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3273                IEM_MC_BEGIN(3, 0);
3274                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3275                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3276                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3277
3278                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3279                IEM_MC_REF_EFLAGS(pEFlags);
3280                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the high dword, but only when the
                   destination is actually written - not for CMP. */
3281                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
3282                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3283
3284                IEM_MC_ADVANCE_RIP_AND_FINISH();
3285                IEM_MC_END();
3286            }
3287            else
3288            {
3289                /* memory target */
3290                uint32_t fAccess;
3291                if (pImpl->pfnLockedU32)
3292                    fAccess = IEM_ACCESS_DATA_RW;
3293                else /* CMP */
3294                    fAccess = IEM_ACCESS_DATA_R;
3295                IEM_MC_BEGIN(3, 2);
3296                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3297                IEM_MC_ARG(uint32_t, u32Src, 1);
3298                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3299                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3300
3301                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3302                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3303                IEM_MC_ASSIGN(u32Src, u32Imm);
3304                if (pImpl->pfnLockedU32)
3305                    IEMOP_HLP_DONE_DECODING();
3306                else
3307                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3308                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3309                IEM_MC_FETCH_EFLAGS(EFlags);
3310                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3311                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3312                else
3313                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3314
3315                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3316                IEM_MC_COMMIT_EFLAGS(EFlags);
3317                IEM_MC_ADVANCE_RIP_AND_FINISH();
3318                IEM_MC_END();
3319            }
3320            break;
3321        }
3322
3323        case IEMMODE_64BIT:
3324        {
3325            if (IEM_IS_MODRM_REG_MODE(bRm))
3326            {
3327                /* register target */
                /* Iz in 64-bit mode is imm32 sign-extended to 64 bits. */
3328                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3329                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3330                IEM_MC_BEGIN(3, 0);
3331                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3332                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3333                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3334
3335                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3336                IEM_MC_REF_EFLAGS(pEFlags);
3337                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3338
3339                IEM_MC_ADVANCE_RIP_AND_FINISH();
3340                IEM_MC_END();
3341            }
3342            else
3343            {
3344                /* memory target */
3345                uint32_t fAccess;
3346                if (pImpl->pfnLockedU64)
3347                    fAccess = IEM_ACCESS_DATA_RW;
3348                else /* CMP */
3349                    fAccess = IEM_ACCESS_DATA_R;
3350                IEM_MC_BEGIN(3, 2);
3351                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3352                IEM_MC_ARG(uint64_t, u64Src, 1);
3353                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3354                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3355
3356                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3357                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3358                if (pImpl->pfnLockedU64)
3359                    IEMOP_HLP_DONE_DECODING();
3360                else
3361                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Note: the assignment sits after the decoding-complete call
                   here, unlike the 16/32-bit paths which assign first; the
                   ordering difference is visible but both values are fixed by
                   this point. */
3362                IEM_MC_ASSIGN(u64Src, u64Imm);
3363                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3364                IEM_MC_FETCH_EFLAGS(EFlags);
3365                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3366                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3367                else
3368                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3369
3370                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3371                IEM_MC_COMMIT_EFLAGS(EFlags);
3372                IEM_MC_ADVANCE_RIP_AND_FINISH();
3373                IEM_MC_END();
3374            }
3375            break;
3376        }
3377
3378        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3379    }
3380}
3381
3382
3383/**
3384 * @opcode 0x82
3385 * @opmnemonic grp1_82
3386 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (group 1 Eb,Ib) that is only valid outside 64-bit
 * mode; in 64-bit mode it raises \#UD, otherwise it forwards to the 0x80
 * handler.
3387 */
3388FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3389{
3390    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3391    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3392}
3393
3394
3395/**
3396 * @opcode 0x83
 *
 * Group 1 with word/dword/qword destination and a sign-extended byte
 * immediate: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.  The imm8 is widened via
 * an (int8_t) cast before assignment/IEM_MC_ARG_CONST for every operand
 * size.
3397 */
3398FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3399{
3400    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3401    switch (IEM_GET_MODRM_REG_8(bRm))
3402    {
3403        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3404        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3405        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3406        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3407        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3408        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3409        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3410        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3411    }
3412    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3413             to the 386 even if absent in the intel reference manuals and some
3414             3rd party opcode listings. */
3415    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3416
3417    if (IEM_IS_MODRM_REG_MODE(bRm))
3418    {
3419        /*
3420         * Register target
3421         */
3422        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3423        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3424        switch (pVCpu->iem.s.enmEffOpSize)
3425        {
3426            case IEMMODE_16BIT:
3427            {
3428                IEM_MC_BEGIN(3, 0);
3429                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* imm8 sign-extended to the operand size. */
3430                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3431                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3432
3433                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3434                IEM_MC_REF_EFLAGS(pEFlags);
3435                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3436
3437                IEM_MC_ADVANCE_RIP_AND_FINISH();
3438                IEM_MC_END();
3439                break;
3440            }
3441
3442            case IEMMODE_32BIT:
3443            {
3444                IEM_MC_BEGIN(3, 0);
3445                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3446                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3447                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3448
3449                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3450                IEM_MC_REF_EFLAGS(pEFlags);
3451                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* High dword is only cleared when the destination is written,
                   i.e. not for CMP. */
3452                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
3453                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3454
3455                IEM_MC_ADVANCE_RIP_AND_FINISH();
3456                IEM_MC_END();
3457                break;
3458            }
3459
3460            case IEMMODE_64BIT:
3461            {
3462                IEM_MC_BEGIN(3, 0);
3463                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3464                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3465                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3466
3467                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3468                IEM_MC_REF_EFLAGS(pEFlags);
3469                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3470
3471                IEM_MC_ADVANCE_RIP_AND_FINISH();
3472                IEM_MC_END();
3473                break;
3474            }
3475
3476            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3477        }
3478    }
3479    else
3480    {
3481        /*
3482         * Memory target.
3483         */
        /* NOTE(review): pfnLockedU16 is used as the NULL/non-NULL probe for
           all operand sizes - assumes the locked workers in g_apIemImplGrp1
           are all set or all NULL per operation; verify against the table. */
3484        uint32_t fAccess;
3485        if (pImpl->pfnLockedU16)
3486            fAccess = IEM_ACCESS_DATA_RW;
3487        else /* CMP */
3488            fAccess = IEM_ACCESS_DATA_R;
3489
3490        switch (pVCpu->iem.s.enmEffOpSize)
3491        {
3492            case IEMMODE_16BIT:
3493            {
3494                IEM_MC_BEGIN(3, 2);
3495                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3496                IEM_MC_ARG(uint16_t, u16Src, 1);
3497                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3498                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3499
                /* One immediate byte follows the ModRM/displacement. */
3500                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3501                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3502                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3503                if (pImpl->pfnLockedU16)
3504                    IEMOP_HLP_DONE_DECODING();
3505                else
3506                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3507                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3508                IEM_MC_FETCH_EFLAGS(EFlags);
3509                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3510                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3511                else
3512                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3513
3514                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3515                IEM_MC_COMMIT_EFLAGS(EFlags);
3516                IEM_MC_ADVANCE_RIP_AND_FINISH();
3517                IEM_MC_END();
3518                break;
3519            }
3520
3521            case IEMMODE_32BIT:
3522            {
3523                IEM_MC_BEGIN(3, 2);
3524                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3525                IEM_MC_ARG(uint32_t, u32Src, 1);
3526                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3527                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3528
3529                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3530                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3531                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3532                if (pImpl->pfnLockedU32)
3533                    IEMOP_HLP_DONE_DECODING();
3534                else
3535                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3537                IEM_MC_FETCH_EFLAGS(EFlags);
3538                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3539                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3540                else
3541                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3542
3543                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3544                IEM_MC_COMMIT_EFLAGS(EFlags);
3545                IEM_MC_ADVANCE_RIP_AND_FINISH();
3546                IEM_MC_END();
3547                break;
3548            }
3549
3550            case IEMMODE_64BIT:
3551            {
3552                IEM_MC_BEGIN(3, 2);
3553                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3554                IEM_MC_ARG(uint64_t, u64Src, 1);
3555                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3556                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3557
3558                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3559                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3560                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3561                if (pImpl->pfnLockedU64)
3562                    IEMOP_HLP_DONE_DECODING();
3563                else
3564                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3565                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3566                IEM_MC_FETCH_EFLAGS(EFlags);
3567                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3568                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3569                else
3570                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3571
3572                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3573                IEM_MC_COMMIT_EFLAGS(EFlags);
3574                IEM_MC_ADVANCE_RIP_AND_FINISH();
3575                IEM_MC_END();
3576                break;
3577            }
3578
3579            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3580        }
3581    }
3582}
3583
3584
3585/**
3586 * @opcode 0x84
 *
 * test Eb,Gb - AND without writing the destination; delegates to the common
 * byte-sized binary-operator decoder with the TEST worker table.  AF is
 * architecturally undefined after TEST.
3587 */
3588FNIEMOP_DEF(iemOp_test_Eb_Gb)
3589{
3590    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3591    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3592    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3593}
3594
3595
3596/**
3597 * @opcode 0x85
 *
 * test Ev,Gv - word/dword/qword variant of TEST; delegates to the common
 * Ev,Gv binary-operator decoder with the TEST worker table.  AF is
 * architecturally undefined after TEST.
3598 */
3599FNIEMOP_DEF(iemOp_test_Ev_Gv)
3600{
3601    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3602    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3603    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3604}
3605
3606
3607/**
3608 * @opcode 0x86
 *
 * xchg Eb,Gb - exchange byte register with register/memory.  The memory form
 * uses the locked worker by default (XCHG with memory is implicitly locked)
 * unless fDisregardLock is set.
3609 */
3610FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3611{
3612    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3613    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3614
3615    /*
3616     * If rm is denoting a register, no more instruction bytes.
3617     */
3618    if (IEM_IS_MODRM_REG_MODE(bRm))
3619    {
3620        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3621
        /* Plain two-temporary swap of the two byte registers. */
3622        IEM_MC_BEGIN(0, 2);
3623        IEM_MC_LOCAL(uint8_t, uTmp1);
3624        IEM_MC_LOCAL(uint8_t, uTmp2);
3625
3626        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3627        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3628        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3629        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3630
3631        IEM_MC_ADVANCE_RIP_AND_FINISH();
3632        IEM_MC_END();
3633    }
3634    else
3635    {
3636        /*
3637         * We're accessing memory.
3638         */
3639/** @todo the register must be committed separately! */
3640        IEM_MC_BEGIN(2, 2);
3641        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3642        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3643        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3644
3645        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3646        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3647        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Memory XCHG is implicitly locked; the unlocked worker is only used
           when the VM is configured to disregard LOCK semantics. */
3648        if (!pVCpu->iem.s.fDisregardLock)
3649            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
3650        else
3651            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
3652        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3653
3654        IEM_MC_ADVANCE_RIP_AND_FINISH();
3655        IEM_MC_END();
3656    }
3657}
3658
3659
3660/**
3661 * @opcode 0x87
 *
 * xchg Ev,Gv - exchange word/dword/qword register with register/memory.
 * Memory forms use the locked workers unless fDisregardLock; the 32-bit
 * memory form explicitly clears the high dword of the register operand
 * after the swap.
3662 */
3663FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3664{
3665    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3666    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3667
3668    /*
3669     * If rm is denoting a register, no more instruction bytes.
3670     */
3671    if (IEM_IS_MODRM_REG_MODE(bRm))
3672    {
3673        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3674
3675        switch (pVCpu->iem.s.enmEffOpSize)
3676        {
3677            case IEMMODE_16BIT:
3678                IEM_MC_BEGIN(0, 2);
3679                IEM_MC_LOCAL(uint16_t, uTmp1);
3680                IEM_MC_LOCAL(uint16_t, uTmp2);
3681
3682                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3683                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3684                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3685                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3686
3687                IEM_MC_ADVANCE_RIP_AND_FINISH();
3688                IEM_MC_END();
3689                break;
3690
3691            case IEMMODE_32BIT:
3692                IEM_MC_BEGIN(0, 2);
3693                IEM_MC_LOCAL(uint32_t, uTmp1);
3694                IEM_MC_LOCAL(uint32_t, uTmp2);
3695
3696                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3697                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3698                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3699                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3700
3701                IEM_MC_ADVANCE_RIP_AND_FINISH();
3702                IEM_MC_END();
3703                break;
3704
3705            case IEMMODE_64BIT:
3706                IEM_MC_BEGIN(0, 2);
3707                IEM_MC_LOCAL(uint64_t, uTmp1);
3708                IEM_MC_LOCAL(uint64_t, uTmp2);
3709
3710                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3711                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3712                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3713                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3714
3715                IEM_MC_ADVANCE_RIP_AND_FINISH();
3716                IEM_MC_END();
3717                break;
3718
3719            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3720        }
3721    }
3722    else
3723    {
3724        /*
3725         * We're accessing memory.
3726         */
3727        switch (pVCpu->iem.s.enmEffOpSize)
3728        {
3729/** @todo the register must be committed separately! */
3730            case IEMMODE_16BIT:
3731                IEM_MC_BEGIN(2, 2);
3732                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3733                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3734                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3735
3736                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3737                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3738                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Memory XCHG is implicitly locked. */
3739                if (!pVCpu->iem.s.fDisregardLock)
3740                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
3741                else
3742                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
3743                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3744
3745                IEM_MC_ADVANCE_RIP_AND_FINISH();
3746                IEM_MC_END();
3747                break;
3748
3749            case IEMMODE_32BIT:
3750                IEM_MC_BEGIN(2, 2);
3751                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3752                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3753                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3754
3755                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3756                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3757                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3758                if (!pVCpu->iem.s.fDisregardLock)
3759                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
3760                else
3761                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
3762                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3763
                /* The worker wrote the GPR via reference, so the usual
                   implicit high-dword clearing must be done explicitly. */
3764                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3765                IEM_MC_ADVANCE_RIP_AND_FINISH();
3766                IEM_MC_END();
3767                break;
3768
3769            case IEMMODE_64BIT:
3770                IEM_MC_BEGIN(2, 2);
3771                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3772                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3773                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3774
3775                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3776                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3777                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3778                if (!pVCpu->iem.s.fDisregardLock)
3779                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
3780                else
3781                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
3782                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3783
3784                IEM_MC_ADVANCE_RIP_AND_FINISH();
3785                IEM_MC_END();
3786                break;
3787
3788            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3789        }
3790    }
3791}
3792
3793
3794/**
3795 * @opcode 0x88
 *
 * mov Eb,Gb - store byte register to register/memory.
3796 */
3797FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3798{
3799    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3800
3801    uint8_t bRm;
3802    IEM_OPCODE_GET_NEXT_U8(&bRm);
3803
3804    /*
3805     * If rm is denoting a register, no more instruction bytes.
3806     */
3807    if (IEM_IS_MODRM_REG_MODE(bRm))
3808    {
3809        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3810        IEM_MC_BEGIN(0, 1);
3811        IEM_MC_LOCAL(uint8_t, u8Value);
3812        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3813        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
3814        IEM_MC_ADVANCE_RIP_AND_FINISH();
3815        IEM_MC_END();
3816    }
3817    else
3818    {
3819        /*
3820         * We're writing a register to memory.
3821         */
3822        IEM_MC_BEGIN(0, 2);
3823        IEM_MC_LOCAL(uint8_t, u8Value);
3824        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* No immediate follows, hence the 0 byte count to the address calc. */
3825        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3826        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3827        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3828        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3829        IEM_MC_ADVANCE_RIP_AND_FINISH();
3830        IEM_MC_END();
3831    }
3832}
3833
3834
3835/**
3836 * @opcode 0x89
 *
 * mov Ev,Gv - store word/dword/qword register to register/memory, one MC
 * block per effective operand size.
3837 */
3838FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3839{
3840    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3841
3842    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3843
3844    /*
3845     * If rm is denoting a register, no more instruction bytes.
3846     */
3847    if (IEM_IS_MODRM_REG_MODE(bRm))
3848    {
3849        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3850        switch (pVCpu->iem.s.enmEffOpSize)
3851        {
3852            case IEMMODE_16BIT:
3853                IEM_MC_BEGIN(0, 1);
3854                IEM_MC_LOCAL(uint16_t, u16Value);
3855                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3856                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
3857                IEM_MC_ADVANCE_RIP_AND_FINISH();
3858                IEM_MC_END();
3859                break;
3860
3861            case IEMMODE_32BIT:
3862                IEM_MC_BEGIN(0, 1);
3863                IEM_MC_LOCAL(uint32_t, u32Value);
3864                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3865                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
3866                IEM_MC_ADVANCE_RIP_AND_FINISH();
3867                IEM_MC_END();
3868                break;
3869
3870            case IEMMODE_64BIT:
3871                IEM_MC_BEGIN(0, 1);
3872                IEM_MC_LOCAL(uint64_t, u64Value);
3873                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3874                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
3875                IEM_MC_ADVANCE_RIP_AND_FINISH();
3876                IEM_MC_END();
3877                break;
3878
3879            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3880        }
3881    }
3882    else
3883    {
3884        /*
3885         * We're writing a register to memory.
3886         */
3887        switch (pVCpu->iem.s.enmEffOpSize)
3888        {
3889            case IEMMODE_16BIT:
3890                IEM_MC_BEGIN(0, 2);
3891                IEM_MC_LOCAL(uint16_t, u16Value);
3892                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3893                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3894                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3895                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3896                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3897                IEM_MC_ADVANCE_RIP_AND_FINISH();
3898                IEM_MC_END();
3899                break;
3900
3901            case IEMMODE_32BIT:
3902                IEM_MC_BEGIN(0, 2);
3903                IEM_MC_LOCAL(uint32_t, u32Value);
3904                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3905                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3906                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3907                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3908                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3909                IEM_MC_ADVANCE_RIP_AND_FINISH();
3910                IEM_MC_END();
3911                break;
3912
3913            case IEMMODE_64BIT:
3914                IEM_MC_BEGIN(0, 2);
3915                IEM_MC_LOCAL(uint64_t, u64Value);
3916                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3917                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3918                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3919                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
3920                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3921                IEM_MC_ADVANCE_RIP_AND_FINISH();
3922                IEM_MC_END();
3923                break;
3924
3925            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3926        }
3927    }
3928}
3929
3930
3931/**
3932 * @opcode 0x8a
 *
 * mov Gb,Eb - load byte register from register/memory.
3933 */
3934FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3935{
3936    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3937
3938    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3939
3940    /*
3941     * If rm is denoting a register, no more instruction bytes.
3942     */
3943    if (IEM_IS_MODRM_REG_MODE(bRm))
3944    {
3945        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3946        IEM_MC_BEGIN(0, 1);
3947        IEM_MC_LOCAL(uint8_t, u8Value);
3948        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3949        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3950        IEM_MC_ADVANCE_RIP_AND_FINISH();
3951        IEM_MC_END();
3952    }
3953    else
3954    {
3955        /*
3956         * We're loading a register from memory.
3957         */
3958        IEM_MC_BEGIN(0, 2);
3959        IEM_MC_LOCAL(uint8_t, u8Value);
3960        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3961        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3962        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3963        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3964        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
3965        IEM_MC_ADVANCE_RIP_AND_FINISH();
3966        IEM_MC_END();
3967    }
3968}
3969
3970
3971/**
3972 * @opcode 0x8b
 *
 * mov Gv,Ev - load word/dword/qword register from register/memory, one MC
 * block per effective operand size.
3973 */
3974FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3975{
3976    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3977
3978    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3979
3980    /*
3981     * If rm is denoting a register, no more instruction bytes.
3982     */
3983    if (IEM_IS_MODRM_REG_MODE(bRm))
3984    {
3985        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3986        switch (pVCpu->iem.s.enmEffOpSize)
3987        {
3988            case IEMMODE_16BIT:
3989                IEM_MC_BEGIN(0, 1);
3990                IEM_MC_LOCAL(uint16_t, u16Value);
3991                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
3992                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
3993                IEM_MC_ADVANCE_RIP_AND_FINISH();
3994                IEM_MC_END();
3995                break;
3996
3997            case IEMMODE_32BIT:
3998                IEM_MC_BEGIN(0, 1);
3999                IEM_MC_LOCAL(uint32_t, u32Value);
4000                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4001                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4002                IEM_MC_ADVANCE_RIP_AND_FINISH();
4003                IEM_MC_END();
4004                break;
4005
4006            case IEMMODE_64BIT:
4007                IEM_MC_BEGIN(0, 1);
4008                IEM_MC_LOCAL(uint64_t, u64Value);
4009                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4010                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4011                IEM_MC_ADVANCE_RIP_AND_FINISH();
4012                IEM_MC_END();
4013                break;
4014
4015            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4016        }
4017    }
4018    else
4019    {
4020        /*
4021         * We're loading a register from memory.
4022         */
4023        switch (pVCpu->iem.s.enmEffOpSize)
4024        {
4025            case IEMMODE_16BIT:
4026                IEM_MC_BEGIN(0, 2);
4027                IEM_MC_LOCAL(uint16_t, u16Value);
4028                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4029                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4030                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4031                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4032                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4033                IEM_MC_ADVANCE_RIP_AND_FINISH();
4034                IEM_MC_END();
4035                break;
4036
4037            case IEMMODE_32BIT:
4038                IEM_MC_BEGIN(0, 2);
4039                IEM_MC_LOCAL(uint32_t, u32Value);
4040                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4041                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4042                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4043                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4044                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4045                IEM_MC_ADVANCE_RIP_AND_FINISH();
4046                IEM_MC_END();
4047                break;
4048
4049            case IEMMODE_64BIT:
4050                IEM_MC_BEGIN(0, 2);
4051                IEM_MC_LOCAL(uint64_t, u64Value);
4052                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4053                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4054                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4055                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4056                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4057                IEM_MC_ADVANCE_RIP_AND_FINISH();
4058                IEM_MC_END();
4059                break;
4060
4061            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4062        }
4063    }
4064}
4065
4066
4067/**
4068 * opcode 0x63
4069 * @todo Table fixme
4070 */
4071FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4072{
4073 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4074 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4075 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4076 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4077 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4078}
4079
4080
4081/**
4082 * @opcode 0x8c
 *
 * mov Ev,Sw - store a segment register.  The register form honours the
 * operand size (zero-extending into wider destinations); the memory form is
 * always a 16-bit store.
4083 */
4084FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4085{
4086    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4087
4088    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4089
4090    /*
4091     * Check that the destination register exists. The REX.R prefix is ignored.
4092     */
4093    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4094    if ( iSegReg > X86_SREG_GS)
4095        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4096
4097    /*
4098     * If rm is denoting a register, no more instruction bytes.
4099     * In that case, the operand size is respected and the upper bits are
4100     * cleared (starting with some pentium).
4101     */
4102    if (IEM_IS_MODRM_REG_MODE(bRm))
4103    {
4104        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4105        switch (pVCpu->iem.s.enmEffOpSize)
4106        {
4107            case IEMMODE_16BIT:
4108                IEM_MC_BEGIN(0, 1);
4109                IEM_MC_LOCAL(uint16_t, u16Value);
4110                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4111                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4112                IEM_MC_ADVANCE_RIP_AND_FINISH();
4113                IEM_MC_END();
4114                break;
4115
4116            case IEMMODE_32BIT:
4117                IEM_MC_BEGIN(0, 1);
4118                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extended fetch into the wider destination. */
4119                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4120                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4121                IEM_MC_ADVANCE_RIP_AND_FINISH();
4122                IEM_MC_END();
4123                break;
4124
4125            case IEMMODE_64BIT:
4126                IEM_MC_BEGIN(0, 1);
4127                IEM_MC_LOCAL(uint64_t, u64Value);
4128                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4129                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4130                IEM_MC_ADVANCE_RIP_AND_FINISH();
4131                IEM_MC_END();
4132                break;
4133
4134            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4135        }
4136    }
4137    else
4138    {
4139        /*
4140         * We're saving the register to memory. The access is word sized
4141         * regardless of operand size prefixes.
4142         */
4143#if 0 /* not necessary */
4144        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4145#endif
4146        IEM_MC_BEGIN(0, 2);
4147        IEM_MC_LOCAL(uint16_t, u16Value);
4148        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4149        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4150        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4151        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4152        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4153        IEM_MC_ADVANCE_RIP_AND_FINISH();
4154        IEM_MC_END();
4155    }
4156}
4157
4158
4159
4160
4161/**
4162 * @opcode 0x8d
 *
 * lea Gv,M - load effective address.  The register form is invalid (\#UD);
 * the computed address is truncated or stored whole according to the
 * effective operand size.
4163 */
4164FNIEMOP_DEF(iemOp_lea_Gv_M)
4165{
4166    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4167    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LEA requires a memory operand; mod=3 raises #UD. */
4168    if (IEM_IS_MODRM_REG_MODE(bRm))
4169        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4170
4171    switch (pVCpu->iem.s.enmEffOpSize)
4172    {
4173        case IEMMODE_16BIT:
4174            IEM_MC_BEGIN(0, 2);
4175            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4176            IEM_MC_LOCAL(uint16_t, u16Cast);
4177            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4178            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the operand size. */
4179            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4180            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
4181            IEM_MC_ADVANCE_RIP_AND_FINISH();
4182            IEM_MC_END();
4183            break;
4184
4185        case IEMMODE_32BIT:
4186            IEM_MC_BEGIN(0, 2);
4187            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4188            IEM_MC_LOCAL(uint32_t, u32Cast);
4189            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4190            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4191            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4192            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
4193            IEM_MC_ADVANCE_RIP_AND_FINISH();
4194            IEM_MC_END();
4195            break;
4196
4197        case IEMMODE_64BIT:
4198            IEM_MC_BEGIN(0, 1);
4199            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4200            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4201            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4202            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
4203            IEM_MC_ADVANCE_RIP_AND_FINISH();
4204            IEM_MC_END();
4205            break;
4206
4207        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4208    }
4209}
4210
4211
4212/**
4213 * @opcode 0x8e
4214 */
4215FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4216{
4217 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4218
4219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4220
4221 /*
4222 * The practical operand size is 16-bit.
4223 */
4224#if 0 /* not necessary */
4225 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4226#endif
4227
4228 /*
4229 * Check that the destination register exists and can be used with this
4230 * instruction. The REX.R prefix is ignored.
4231 */
4232 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
4233 if ( iSegReg == X86_SREG_CS
4234 || iSegReg > X86_SREG_GS)
4235 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4236
4237 /*
4238 * If rm is denoting a register, no more instruction bytes.
4239 */
4240 if (IEM_IS_MODRM_REG_MODE(bRm))
4241 {
4242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4243 IEM_MC_BEGIN(2, 0);
4244 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4245 IEM_MC_ARG(uint16_t, u16Value, 1);
4246 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4247 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4248 IEM_MC_END();
4249 }
4250 else
4251 {
4252 /*
4253 * We're loading the register from memory. The access is word sized
4254 * regardless of operand size prefixes.
4255 */
4256 IEM_MC_BEGIN(2, 1);
4257 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4258 IEM_MC_ARG(uint16_t, u16Value, 1);
4259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4263 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4264 IEM_MC_END();
4265 }
4266}
4267
4268
/** Opcode 0x8f /0. - pop Ev: pops a word/dword/qword off the stack into a
 *  general register or memory operand. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP - the extra cbImm argument
       accounts for the pop adjusting rSP by the operand size first. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Note that rSP is only
       committed (below) after both the pop and the store succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u; /* commit the stack pointer update */
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4362
4363
4364/**
4365 * @opcode 0x8f
4366 */
4367FNIEMOP_DEF(iemOp_Grp1A__xop)
4368{
4369 /*
4370 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4371 * three byte VEX prefix, except that the mmmmm field cannot have the values
4372 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4373 */
4374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4375 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4376 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4377
4378 IEMOP_MNEMONIC(xop, "xop");
4379 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4380 {
4381 /** @todo Test when exctly the XOP conformance checks kick in during
4382 * instruction decoding and fetching (using \#PF). */
4383 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4384 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4385 if ( ( pVCpu->iem.s.fPrefixes
4386 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4387 == 0)
4388 {
4389 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4390 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4391 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4392 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4393 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4394 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4395 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4396 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4397 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4398
4399 /** @todo XOP: Just use new tables and decoders. */
4400 switch (bRm & 0x1f)
4401 {
4402 case 8: /* xop opcode map 8. */
4403 IEMOP_BITCH_ABOUT_STUB();
4404 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4405
4406 case 9: /* xop opcode map 9. */
4407 IEMOP_BITCH_ABOUT_STUB();
4408 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4409
4410 case 10: /* xop opcode map 10. */
4411 IEMOP_BITCH_ABOUT_STUB();
4412 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4413
4414 default:
4415 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4416 return IEMOP_RAISE_INVALID_OPCODE();
4417 }
4418 }
4419 else
4420 Log(("XOP: Invalid prefix mix!\n"));
4421 }
4422 else
4423 Log(("XOP: XOP support disabled!\n"));
4424 return IEMOP_RAISE_INVALID_OPCODE();
4425}
4426
4427
4428/**
4429 * Common 'xchg reg,rAX' helper.
4430 */
4431FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4432{
4433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4434
4435 iReg |= pVCpu->iem.s.uRexB;
4436 switch (pVCpu->iem.s.enmEffOpSize)
4437 {
4438 case IEMMODE_16BIT:
4439 IEM_MC_BEGIN(0, 2);
4440 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4441 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4442 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4443 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4444 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4445 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4446 IEM_MC_ADVANCE_RIP_AND_FINISH();
4447 IEM_MC_END();
4448 break;
4449
4450 case IEMMODE_32BIT:
4451 IEM_MC_BEGIN(0, 2);
4452 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4453 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4454 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4455 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4456 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4457 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4458 IEM_MC_ADVANCE_RIP_AND_FINISH();
4459 IEM_MC_END();
4460 break;
4461
4462 case IEMMODE_64BIT:
4463 IEM_MC_BEGIN(0, 2);
4464 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4465 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4466 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4467 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4468 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4469 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4470 IEM_MC_ADVANCE_RIP_AND_FINISH();
4471 IEM_MC_END();
4472 break;
4473
4474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4475 }
4476}
4477
4478
4479/**
4480 * @opcode 0x90
4481 */
4482FNIEMOP_DEF(iemOp_nop)
4483{
4484 /* R8/R8D and RAX/EAX can be exchanged. */
4485 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4486 {
4487 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4488 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4489 }
4490
4491 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4492 {
4493 IEMOP_MNEMONIC(pause, "pause");
4494#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4495 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
4496 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
4497#endif
4498#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4499 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4500 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4501#endif
4502 }
4503 else
4504 IEMOP_MNEMONIC(nop, "nop");
4505 IEM_MC_BEGIN(0, 0);
4506 IEM_MC_ADVANCE_RIP_AND_FINISH();
4507 IEM_MC_END();
4508}
4509
4510
4511/**
4512 * @opcode 0x91
4513 */
4514FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4515{
4516 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4517 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4518}
4519
4520
4521/**
4522 * @opcode 0x92
4523 */
4524FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4525{
4526 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4527 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4528}
4529
4530
4531/**
4532 * @opcode 0x93
4533 */
4534FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4535{
4536 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4537 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4538}
4539
4540
4541/**
4542 * @opcode 0x94
4543 */
4544FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4545{
4546 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4547 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4548}
4549
4550
4551/**
4552 * @opcode 0x95
4553 */
4554FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4555{
4556 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4557 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4558}
4559
4560
4561/**
4562 * @opcode 0x96
4563 */
4564FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4565{
4566 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4567 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4568}
4569
4570
4571/**
4572 * @opcode 0x97
4573 */
4574FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4575{
4576 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4577 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4578}
4579
4580
4581/**
4582 * @opcode 0x98
4583 */
4584FNIEMOP_DEF(iemOp_cbw)
4585{
4586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4587 switch (pVCpu->iem.s.enmEffOpSize)
4588 {
4589 case IEMMODE_16BIT:
4590 IEMOP_MNEMONIC(cbw, "cbw");
4591 IEM_MC_BEGIN(0, 1);
4592 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4593 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4594 } IEM_MC_ELSE() {
4595 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4596 } IEM_MC_ENDIF();
4597 IEM_MC_ADVANCE_RIP_AND_FINISH();
4598 IEM_MC_END();
4599 break;
4600
4601 case IEMMODE_32BIT:
4602 IEMOP_MNEMONIC(cwde, "cwde");
4603 IEM_MC_BEGIN(0, 1);
4604 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4605 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4606 } IEM_MC_ELSE() {
4607 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4608 } IEM_MC_ENDIF();
4609 IEM_MC_ADVANCE_RIP_AND_FINISH();
4610 IEM_MC_END();
4611 break;
4612
4613 case IEMMODE_64BIT:
4614 IEMOP_MNEMONIC(cdqe, "cdqe");
4615 IEM_MC_BEGIN(0, 1);
4616 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4617 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4618 } IEM_MC_ELSE() {
4619 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4620 } IEM_MC_ENDIF();
4621 IEM_MC_ADVANCE_RIP_AND_FINISH();
4622 IEM_MC_END();
4623 break;
4624
4625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4626 }
4627}
4628
4629
4630/**
4631 * @opcode 0x99
4632 */
4633FNIEMOP_DEF(iemOp_cwd)
4634{
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4636 switch (pVCpu->iem.s.enmEffOpSize)
4637 {
4638 case IEMMODE_16BIT:
4639 IEMOP_MNEMONIC(cwd, "cwd");
4640 IEM_MC_BEGIN(0, 1);
4641 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4642 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4643 } IEM_MC_ELSE() {
4644 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4645 } IEM_MC_ENDIF();
4646 IEM_MC_ADVANCE_RIP_AND_FINISH();
4647 IEM_MC_END();
4648 break;
4649
4650 case IEMMODE_32BIT:
4651 IEMOP_MNEMONIC(cdq, "cdq");
4652 IEM_MC_BEGIN(0, 1);
4653 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4654 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4655 } IEM_MC_ELSE() {
4656 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4657 } IEM_MC_ENDIF();
4658 IEM_MC_ADVANCE_RIP_AND_FINISH();
4659 IEM_MC_END();
4660 break;
4661
4662 case IEMMODE_64BIT:
4663 IEMOP_MNEMONIC(cqo, "cqo");
4664 IEM_MC_BEGIN(0, 1);
4665 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4666 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4667 } IEM_MC_ELSE() {
4668 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4669 } IEM_MC_ENDIF();
4670 IEM_MC_ADVANCE_RIP_AND_FINISH();
4671 IEM_MC_END();
4672 break;
4673
4674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4675 }
4676}
4677
4678
4679/**
4680 * @opcode 0x9a
4681 */
4682FNIEMOP_DEF(iemOp_call_Ap)
4683{
4684 IEMOP_MNEMONIC(call_Ap, "call Ap");
4685 IEMOP_HLP_NO_64BIT();
4686
4687 /* Decode the far pointer address and pass it on to the far call C implementation. */
4688 uint32_t offSeg;
4689 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4690 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4691 else
4692 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4693 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4695 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4696}
4697
4698
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions (and CR0.MP/TS conditions) without
 * otherwise touching any state. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4711
4712
4713/**
4714 * @opcode 0x9c
4715 */
4716FNIEMOP_DEF(iemOp_pushf_Fv)
4717{
4718 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
4719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4720 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4721 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4722}
4723
4724
4725/**
4726 * @opcode 0x9d
4727 */
4728FNIEMOP_DEF(iemOp_popf_Fv)
4729{
4730 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
4731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4732 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4733 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4734}
4735
4736
4737/**
4738 * @opcode 0x9e
4739 */
4740FNIEMOP_DEF(iemOp_sahf)
4741{
4742 IEMOP_MNEMONIC(sahf, "sahf");
4743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4744 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4745 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4746 return IEMOP_RAISE_INVALID_OPCODE();
4747 IEM_MC_BEGIN(0, 2);
4748 IEM_MC_LOCAL(uint32_t, u32Flags);
4749 IEM_MC_LOCAL(uint32_t, EFlags);
4750 IEM_MC_FETCH_EFLAGS(EFlags);
4751 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4752 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4753 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4754 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4755 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4756 IEM_MC_COMMIT_EFLAGS(EFlags);
4757 IEM_MC_ADVANCE_RIP_AND_FINISH();
4758 IEM_MC_END();
4759}
4760
4761
4762/**
4763 * @opcode 0x9f
4764 */
4765FNIEMOP_DEF(iemOp_lahf)
4766{
4767 IEMOP_MNEMONIC(lahf, "lahf");
4768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4769 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4770 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4771 return IEMOP_RAISE_INVALID_OPCODE();
4772 IEM_MC_BEGIN(0, 1);
4773 IEM_MC_LOCAL(uint8_t, u8Flags);
4774 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4775 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4776 IEM_MC_ADVANCE_RIP_AND_FINISH();
4777 IEM_MC_END();
4778}
4779
4780
4781/**
4782 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4783 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
4784 * prefixes. Will return on failures.
4785 * @param a_GCPtrMemOff The variable to store the offset in.
4786 */
4787#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4788 do \
4789 { \
4790 switch (pVCpu->iem.s.enmEffAddrMode) \
4791 { \
4792 case IEMMODE_16BIT: \
4793 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4794 break; \
4795 case IEMMODE_32BIT: \
4796 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4797 break; \
4798 case IEMMODE_64BIT: \
4799 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4800 break; \
4801 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4802 } \
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4804 } while (0)
4805
4806/**
4807 * @opcode 0xa0
4808 */
4809FNIEMOP_DEF(iemOp_mov_AL_Ob)
4810{
4811 /*
4812 * Get the offset and fend off lock prefixes.
4813 */
4814 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
4815 RTGCPTR GCPtrMemOff;
4816 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4817
4818 /*
4819 * Fetch AL.
4820 */
4821 IEM_MC_BEGIN(0,1);
4822 IEM_MC_LOCAL(uint8_t, u8Tmp);
4823 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4824 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4825 IEM_MC_ADVANCE_RIP_AND_FINISH();
4826 IEM_MC_END();
4827}
4828
4829
4830/**
4831 * @opcode 0xa1
4832 */
4833FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4834{
4835 /*
4836 * Get the offset and fend off lock prefixes.
4837 */
4838 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4839 RTGCPTR GCPtrMemOff;
4840 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4841
4842 /*
4843 * Fetch rAX.
4844 */
4845 switch (pVCpu->iem.s.enmEffOpSize)
4846 {
4847 case IEMMODE_16BIT:
4848 IEM_MC_BEGIN(0,1);
4849 IEM_MC_LOCAL(uint16_t, u16Tmp);
4850 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4851 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4852 IEM_MC_ADVANCE_RIP_AND_FINISH();
4853 IEM_MC_END();
4854 break;
4855
4856 case IEMMODE_32BIT:
4857 IEM_MC_BEGIN(0,1);
4858 IEM_MC_LOCAL(uint32_t, u32Tmp);
4859 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4860 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4861 IEM_MC_ADVANCE_RIP_AND_FINISH();
4862 IEM_MC_END();
4863 break;
4864
4865 case IEMMODE_64BIT:
4866 IEM_MC_BEGIN(0,1);
4867 IEM_MC_LOCAL(uint64_t, u64Tmp);
4868 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4869 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4870 IEM_MC_ADVANCE_RIP_AND_FINISH();
4871 IEM_MC_END();
4872 break;
4873
4874 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4875 }
4876}
4877
4878
4879/**
4880 * @opcode 0xa2
4881 */
4882FNIEMOP_DEF(iemOp_mov_Ob_AL)
4883{
4884 /*
4885 * Get the offset and fend off lock prefixes.
4886 */
4887 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
4888 RTGCPTR GCPtrMemOff;
4889 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4890
4891 /*
4892 * Store AL.
4893 */
4894 IEM_MC_BEGIN(0,1);
4895 IEM_MC_LOCAL(uint8_t, u8Tmp);
4896 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4897 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4898 IEM_MC_ADVANCE_RIP_AND_FINISH();
4899 IEM_MC_END();
4900}
4901
4902
4903/**
4904 * @opcode 0xa3
4905 */
4906FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4907{
4908 /*
4909 * Get the offset and fend off lock prefixes.
4910 */
4911 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
4912 RTGCPTR GCPtrMemOff;
4913 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4914
4915 /*
4916 * Store rAX.
4917 */
4918 switch (pVCpu->iem.s.enmEffOpSize)
4919 {
4920 case IEMMODE_16BIT:
4921 IEM_MC_BEGIN(0,1);
4922 IEM_MC_LOCAL(uint16_t, u16Tmp);
4923 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4924 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4925 IEM_MC_ADVANCE_RIP_AND_FINISH();
4926 IEM_MC_END();
4927 break;
4928
4929 case IEMMODE_32BIT:
4930 IEM_MC_BEGIN(0,1);
4931 IEM_MC_LOCAL(uint32_t, u32Tmp);
4932 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4933 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4934 IEM_MC_ADVANCE_RIP_AND_FINISH();
4935 IEM_MC_END();
4936 break;
4937
4938 case IEMMODE_64BIT:
4939 IEM_MC_BEGIN(0,1);
4940 IEM_MC_LOCAL(uint64_t, u64Tmp);
4941 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4942 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4943 IEM_MC_ADVANCE_RIP_AND_FINISH();
4944 IEM_MC_END();
4945 break;
4946
4947 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4948 }
4949}
4950
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep movs iteration: load from DS(or override):rSI, store to
 * ES:rDI, then advance or retreat both index registers by the value size
 * according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
4969
4970/**
4971 * @opcode 0xa4
4972 */
4973FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4974{
4975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4976
4977 /*
4978 * Use the C implementation if a repeat prefix is encountered.
4979 */
4980 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4981 {
4982 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4983 switch (pVCpu->iem.s.enmEffAddrMode)
4984 {
4985 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4986 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4987 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4989 }
4990 }
4991 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4992
4993 /*
4994 * Sharing case implementation with movs[wdq] below.
4995 */
4996 switch (pVCpu->iem.s.enmEffAddrMode)
4997 {
4998 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4999 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5000 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
5001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5002 }
5003}
5004
5005
5006/**
5007 * @opcode 0xa5
5008 */
5009FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5010{
5011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5012
5013 /*
5014 * Use the C implementation if a repeat prefix is encountered.
5015 */
5016 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5017 {
5018 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5019 switch (pVCpu->iem.s.enmEffOpSize)
5020 {
5021 case IEMMODE_16BIT:
5022 switch (pVCpu->iem.s.enmEffAddrMode)
5023 {
5024 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5025 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5026 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5028 }
5029 break;
5030 case IEMMODE_32BIT:
5031 switch (pVCpu->iem.s.enmEffAddrMode)
5032 {
5033 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5034 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5035 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5037 }
5038 case IEMMODE_64BIT:
5039 switch (pVCpu->iem.s.enmEffAddrMode)
5040 {
5041 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5042 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5043 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5045 }
5046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5047 }
5048 }
5049 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5050
5051 /*
5052 * Annoying double switch here.
5053 * Using ugly macro for implementing the cases, sharing it with movsb.
5054 */
5055 switch (pVCpu->iem.s.enmEffOpSize)
5056 {
5057 case IEMMODE_16BIT:
5058 switch (pVCpu->iem.s.enmEffAddrMode)
5059 {
5060 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5061 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5062 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5064 }
5065 break;
5066
5067 case IEMMODE_32BIT:
5068 switch (pVCpu->iem.s.enmEffAddrMode)
5069 {
5070 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5071 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5072 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5074 }
5075 break;
5076
5077 case IEMMODE_64BIT:
5078 switch (pVCpu->iem.s.enmEffAddrMode)
5079 {
5080 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5081 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5082 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5084 }
5085 break;
5086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5087 }
5088}
5089
5090#undef IEM_MOVS_CASE
5091
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep cmps iteration: load from DS(or override):rSI and
 * ES:rDI, compare via the cmp assembly helper (updating EFLAGS), then
 * advance or retreat both index registers per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
5118
5119/**
5120 * @opcode 0xa6
5121 */
5122FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
5123{
5124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5125
5126 /*
5127 * Use the C implementation if a repeat prefix is encountered.
5128 */
5129 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5130 {
5131 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
5132 switch (pVCpu->iem.s.enmEffAddrMode)
5133 {
5134 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5135 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5136 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5138 }
5139 }
5140 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5141 {
5142 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
5143 switch (pVCpu->iem.s.enmEffAddrMode)
5144 {
5145 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5146 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5147 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5149 }
5150 }
5151 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
5152
5153 /*
5154 * Sharing case implementation with cmps[wdq] below.
5155 */
5156 switch (pVCpu->iem.s.enmEffAddrMode)
5157 {
5158 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
5159 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
5160 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
5161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5162 }
5163}
5164
5165
5166/**
5167 * @opcode 0xa7
5168 */
5169FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5170{
5171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5172
5173 /*
5174 * Use the C implementation if a repeat prefix is encountered.
5175 */
5176 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5177 {
5178 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5179 switch (pVCpu->iem.s.enmEffOpSize)
5180 {
5181 case IEMMODE_16BIT:
5182 switch (pVCpu->iem.s.enmEffAddrMode)
5183 {
5184 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5185 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5186 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5188 }
5189 break;
5190 case IEMMODE_32BIT:
5191 switch (pVCpu->iem.s.enmEffAddrMode)
5192 {
5193 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5194 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5195 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5197 }
5198 case IEMMODE_64BIT:
5199 switch (pVCpu->iem.s.enmEffAddrMode)
5200 {
5201 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5202 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5203 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5205 }
5206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5207 }
5208 }
5209
5210 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5211 {
5212 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5213 switch (pVCpu->iem.s.enmEffOpSize)
5214 {
5215 case IEMMODE_16BIT:
5216 switch (pVCpu->iem.s.enmEffAddrMode)
5217 {
5218 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5219 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5220 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5221 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5222 }
5223 break;
5224 case IEMMODE_32BIT:
5225 switch (pVCpu->iem.s.enmEffAddrMode)
5226 {
5227 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5228 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5229 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5230 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5231 }
5232 case IEMMODE_64BIT:
5233 switch (pVCpu->iem.s.enmEffAddrMode)
5234 {
5235 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5236 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5237 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5238 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5239 }
5240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5241 }
5242 }
5243
5244 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5245
5246 /*
5247 * Annoying double switch here.
5248 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5249 */
5250 switch (pVCpu->iem.s.enmEffOpSize)
5251 {
5252 case IEMMODE_16BIT:
5253 switch (pVCpu->iem.s.enmEffAddrMode)
5254 {
5255 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5256 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5257 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5259 }
5260 break;
5261
5262 case IEMMODE_32BIT:
5263 switch (pVCpu->iem.s.enmEffAddrMode)
5264 {
5265 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5266 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5267 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5269 }
5270 break;
5271
5272 case IEMMODE_64BIT:
5273 switch (pVCpu->iem.s.enmEffAddrMode)
5274 {
5275 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5276 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5277 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5279 }
5280 break;
5281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5282 }
5283}
5284
5285#undef IEM_CMPS_CASE
5286
/**
 * @opcode 0xa8
 *
 * TEST AL,imm8 - delegates to the common AL,Ib binary-operator helper with
 * the 'test' implementation table.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5296
5297
/**
 * @opcode 0xa9
 *
 * TEST rAX,imm16/32 - delegates to the common rAX,Iz binary-operator helper
 * with the 'test' implementation table.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5307
5308
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-repeated STOS variant: fetch xAX
 * (ValBits wide), store it at ES:[xDI] (xDI zero-extended from AddrBits),
 * then advance or retreat xDI by ValBits/8 depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
5324
/**
 * @opcode 0xaa
 *
 * STOSB. REP-prefixed forms (REPZ and REPNZ both act as REP here) are
 * deferred to the C implementations; the plain form uses IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        /* Only the address size matters for the byte variant. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5359
5360
5361/**
5362 * @opcode 0xab
5363 */
5364FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5365{
5366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5367
5368 /*
5369 * Use the C implementation if a repeat prefix is encountered.
5370 */
5371 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5372 {
5373 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5374 switch (pVCpu->iem.s.enmEffOpSize)
5375 {
5376 case IEMMODE_16BIT:
5377 switch (pVCpu->iem.s.enmEffAddrMode)
5378 {
5379 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5380 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5381 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5383 }
5384 break;
5385 case IEMMODE_32BIT:
5386 switch (pVCpu->iem.s.enmEffAddrMode)
5387 {
5388 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5389 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5390 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5392 }
5393 case IEMMODE_64BIT:
5394 switch (pVCpu->iem.s.enmEffAddrMode)
5395 {
5396 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5397 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5398 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5400 }
5401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5402 }
5403 }
5404 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5405
5406 /*
5407 * Annoying double switch here.
5408 * Using ugly macro for implementing the cases, sharing it with stosb.
5409 */
5410 switch (pVCpu->iem.s.enmEffOpSize)
5411 {
5412 case IEMMODE_16BIT:
5413 switch (pVCpu->iem.s.enmEffAddrMode)
5414 {
5415 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5416 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5417 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5419 }
5420 break;
5421
5422 case IEMMODE_32BIT:
5423 switch (pVCpu->iem.s.enmEffAddrMode)
5424 {
5425 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5426 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5427 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5429 }
5430 break;
5431
5432 case IEMMODE_64BIT:
5433 switch (pVCpu->iem.s.enmEffAddrMode)
5434 {
5435 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5436 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5437 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5439 }
5440 break;
5441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5442 }
5443}
5444
5445#undef IEM_STOS_CASE
5446
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated LODS variant: load a ValBits
 * value from iEffSeg:[xSI] (xSI zero-extended from AddrBits) into xAX,
 * then advance or retreat xSI by ValBits/8 depending on EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
5462
/**
 * @opcode 0xac
 *
 * LODSB. REP-prefixed forms are deferred to the C implementations; the
 * plain form uses IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        /* Only the address size matters for the byte variant. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5497
5498
5499/**
5500 * @opcode 0xad
5501 */
5502FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5503{
5504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5505
5506 /*
5507 * Use the C implementation if a repeat prefix is encountered.
5508 */
5509 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5510 {
5511 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5512 switch (pVCpu->iem.s.enmEffOpSize)
5513 {
5514 case IEMMODE_16BIT:
5515 switch (pVCpu->iem.s.enmEffAddrMode)
5516 {
5517 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5518 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5519 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5521 }
5522 break;
5523 case IEMMODE_32BIT:
5524 switch (pVCpu->iem.s.enmEffAddrMode)
5525 {
5526 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5527 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5528 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5530 }
5531 case IEMMODE_64BIT:
5532 switch (pVCpu->iem.s.enmEffAddrMode)
5533 {
5534 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5535 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5536 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5538 }
5539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5540 }
5541 }
5542 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5543
5544 /*
5545 * Annoying double switch here.
5546 * Using ugly macro for implementing the cases, sharing it with lodsb.
5547 */
5548 switch (pVCpu->iem.s.enmEffOpSize)
5549 {
5550 case IEMMODE_16BIT:
5551 switch (pVCpu->iem.s.enmEffAddrMode)
5552 {
5553 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5554 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5555 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5557 }
5558 break;
5559
5560 case IEMMODE_32BIT:
5561 switch (pVCpu->iem.s.enmEffAddrMode)
5562 {
5563 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5564 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5565 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5567 }
5568 break;
5569
5570 case IEMMODE_64BIT:
5571 switch (pVCpu->iem.s.enmEffAddrMode)
5572 {
5573 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5574 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5575 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5577 }
5578 break;
5579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5580 }
5581}
5582
5583#undef IEM_LODS_CASE
5584
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated SCAS variant: compare xAX
 * (ValBits wide) against the value at ES:[xDI] via iemAImpl_cmp_uNN
 * (updating EFLAGS only), then advance or retreat xDI by ValBits/8
 * depending on EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
5606
5607/**
5608 * @opcode 0xae
5609 */
5610FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5611{
5612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5613
5614 /*
5615 * Use the C implementation if a repeat prefix is encountered.
5616 */
5617 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5618 {
5619 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5620 switch (pVCpu->iem.s.enmEffAddrMode)
5621 {
5622 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5623 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5624 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5626 }
5627 }
5628 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5629 {
5630 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5631 switch (pVCpu->iem.s.enmEffAddrMode)
5632 {
5633 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5634 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5635 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5636 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5637 }
5638 }
5639 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5640
5641 /*
5642 * Sharing case implementation with stos[wdq] below.
5643 */
5644 switch (pVCpu->iem.s.enmEffAddrMode)
5645 {
5646 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5647 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5648 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5650 }
5651}
5652
5653
5654/**
5655 * @opcode 0xaf
5656 */
5657FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5658{
5659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5660
5661 /*
5662 * Use the C implementation if a repeat prefix is encountered.
5663 */
5664 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5665 {
5666 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5667 switch (pVCpu->iem.s.enmEffOpSize)
5668 {
5669 case IEMMODE_16BIT:
5670 switch (pVCpu->iem.s.enmEffAddrMode)
5671 {
5672 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5673 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5674 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5676 }
5677 break;
5678 case IEMMODE_32BIT:
5679 switch (pVCpu->iem.s.enmEffAddrMode)
5680 {
5681 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5682 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5683 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5685 }
5686 case IEMMODE_64BIT:
5687 switch (pVCpu->iem.s.enmEffAddrMode)
5688 {
5689 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5690 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5691 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5693 }
5694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5695 }
5696 }
5697 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5698 {
5699 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5700 switch (pVCpu->iem.s.enmEffOpSize)
5701 {
5702 case IEMMODE_16BIT:
5703 switch (pVCpu->iem.s.enmEffAddrMode)
5704 {
5705 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5706 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5707 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5709 }
5710 break;
5711 case IEMMODE_32BIT:
5712 switch (pVCpu->iem.s.enmEffAddrMode)
5713 {
5714 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5715 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5716 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5718 }
5719 case IEMMODE_64BIT:
5720 switch (pVCpu->iem.s.enmEffAddrMode)
5721 {
5722 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5723 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5724 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5726 }
5727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5728 }
5729 }
5730 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5731
5732 /*
5733 * Annoying double switch here.
5734 * Using ugly macro for implementing the cases, sharing it with scasb.
5735 */
5736 switch (pVCpu->iem.s.enmEffOpSize)
5737 {
5738 case IEMMODE_16BIT:
5739 switch (pVCpu->iem.s.enmEffAddrMode)
5740 {
5741 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5742 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5743 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5744 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5745 }
5746 break;
5747
5748 case IEMMODE_32BIT:
5749 switch (pVCpu->iem.s.enmEffAddrMode)
5750 {
5751 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5752 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5753 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5755 }
5756 break;
5757
5758 case IEMMODE_64BIT:
5759 switch (pVCpu->iem.s.enmEffAddrMode)
5760 {
5761 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5762 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5763 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5765 }
5766 break;
5767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5768 }
5769}
5770
5771#undef IEM_SCAS_CASE
5772
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit general
 * register.
 *
 * @param   iReg    The target register index (X86_GREG_XXX with any REX.B
 *                  bit already merged in by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5787
5788
5789/**
5790 * @opcode 0xb0
5791 */
5792FNIEMOP_DEF(iemOp_mov_AL_Ib)
5793{
5794 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5795 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5796}
5797
5798
5799/**
5800 * @opcode 0xb1
5801 */
5802FNIEMOP_DEF(iemOp_CL_Ib)
5803{
5804 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5805 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5806}
5807
5808
5809/**
5810 * @opcode 0xb2
5811 */
5812FNIEMOP_DEF(iemOp_DL_Ib)
5813{
5814 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5815 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5816}
5817
5818
5819/**
5820 * @opcode 0xb3
5821 */
5822FNIEMOP_DEF(iemOp_BL_Ib)
5823{
5824 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5825 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5826}
5827
5828
5829/**
5830 * @opcode 0xb4
5831 */
5832FNIEMOP_DEF(iemOp_mov_AH_Ib)
5833{
5834 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5835 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5836}
5837
5838
5839/**
5840 * @opcode 0xb5
5841 */
5842FNIEMOP_DEF(iemOp_CH_Ib)
5843{
5844 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5845 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5846}
5847
5848
5849/**
5850 * @opcode 0xb6
5851 */
5852FNIEMOP_DEF(iemOp_DH_Ib)
5853{
5854 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5855 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5856}
5857
5858
5859/**
5860 * @opcode 0xb7
5861 */
5862FNIEMOP_DEF(iemOp_BH_Ib)
5863{
5864 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5865 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5866}
5867
5868
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate sized by the effective operand size (imm16, imm32,
 * or the full imm64 for the 64-bit form) and stores it into the given
 * general register.
 *
 * @param   iReg    The target register index (X86_GREG_XXX with any REX.B
 *                  bit already merged in by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5916
5917
5918/**
5919 * @opcode 0xb8
5920 */
5921FNIEMOP_DEF(iemOp_eAX_Iv)
5922{
5923 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5924 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5925}
5926
5927
5928/**
5929 * @opcode 0xb9
5930 */
5931FNIEMOP_DEF(iemOp_eCX_Iv)
5932{
5933 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5934 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5935}
5936
5937
5938/**
5939 * @opcode 0xba
5940 */
5941FNIEMOP_DEF(iemOp_eDX_Iv)
5942{
5943 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5944 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5945}
5946
5947
5948/**
5949 * @opcode 0xbb
5950 */
5951FNIEMOP_DEF(iemOp_eBX_Iv)
5952{
5953 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5954 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5955}
5956
5957
5958/**
5959 * @opcode 0xbc
5960 */
5961FNIEMOP_DEF(iemOp_eSP_Iv)
5962{
5963 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5964 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5965}
5966
5967
5968/**
5969 * @opcode 0xbd
5970 */
5971FNIEMOP_DEF(iemOp_eBP_Iv)
5972{
5973 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5974 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5975}
5976
5977
5978/**
5979 * @opcode 0xbe
5980 */
5981FNIEMOP_DEF(iemOp_eSI_Iv)
5982{
5983 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5984 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5985}
5986
5987
5988/**
5989 * @opcode 0xbf
5990 */
5991FNIEMOP_DEF(iemOp_eDI_Iv)
5992{
5993 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5994 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5995}
5996
5997
/**
 * @opcode 0xc0
 *
 * Group 2 shift/rotate Eb,imm8. The ModR/M reg field selects the operation
 * (rol/ror/rcl/rcr/shl/shr/sar); /6 is an invalid encoding. Requires an
 * 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the implementation table and mnemonic from the /r field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF may be left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the operand read-write, apply the op, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1: an imm8 follows the ModR/M operand */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6058
6059
/**
 * @opcode 0xc1
 *
 * Group 2 shift/rotate Ev,imm8. The ModR/M reg field selects the operation
 * (rol/ror/rcl/rcr/shl/shr/sar); /6 is an invalid encoding. Requires an
 * 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the implementation table and mnemonic from the /r field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF may be left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register operand, one case per effective operand size */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory operand: map read-write, apply the op, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1: an imm8 follows the ModR/M operand */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1: an imm8 follows the ModR/M operand */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1: an imm8 follows the ModR/M operand */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6201
6202
/**
 * @opcode 0xc2
 *
 * Near return, popping u16Imm additional bytes of arguments off the stack
 * (RET Iw).  Defers to a C implementation selected by effective operand size.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    /* In 64-bit mode the operand size defaults to 64-bit; Intel CPUs ignore
       an operand size prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6223
6224
/**
 * @opcode 0xc3
 *
 * Near return without argument popping (RET).  Defers to a C implementation
 * selected by effective operand size.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* In 64-bit mode the operand size defaults to 64-bit; Intel CPUs ignore
       an operand size prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_16);
        case IEMMODE_32BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_32);
        case IEMMODE_64BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6244
6245
/**
 * @opcode 0xc4
 *
 * Either LES Gv,Mp or the 3-byte VEX prefix, depending on mode and MODRM.MOD.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        /* VEX3 prefix path. */
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B bits are stored inverted in the VEX prefix bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* VEX.mmmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy LES path. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6315
6316
/**
 * @opcode 0xc5
 *
 * Either LDS Gv,Mp or the 2-byte VEX prefix, depending on mode and MODRM.MOD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* VEX2 prefix path - always selects opcode map 1 (0x0f). */
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The R and vvvv fields are stored inverted in the prefix byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy LDS path. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6360
6361
/**
 * @opcode 0xc6
 *
 * Group 11: MOV Eb,Ib (reg field 0); all other reg encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The trailing '1' accounts for the 1-byte immediate following the
           effective address encoding (matters for RIP-relative addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6395
6396
/**
 * @opcode 0xc7
 *
 * Group 11: MOV Ev,Iz (reg field 0); all other reg encodings raise \#UD.
 * The 64-bit form uses a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Sign-extended 32-bit immediate, per the AMD64 convention. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The immediate-size argument to
           IEM_MC_CALC_RM_EFF_ADDR is the number of opcode bytes following
           the effective address encoding (2 or 4). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still a 4-byte immediate here (sign-extended to 64-bit). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6484
6485
6486
6487
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame of cbFrame bytes with the given nesting
 * level.  Requires a 186 or later; defers to a C implementation.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6501
6502
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame created by ENTER.  Requires a 186 or
 * later; defers to a C implementation.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6514
6515
/**
 * @opcode 0xca
 *
 * Far return, popping u16Imm additional bytes off the stack (RETF Iw).
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6526
6527
/**
 * @opcode 0xcb
 *
 * Far return without argument popping (RETF) - same C implementation as
 * 0xca with a zero pop count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6537
6538
/**
 * @opcode 0xcc
 *
 * INT3 - breakpoint trap, raising \#BP via the common software interrupt
 * C implementation.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
6548
6549
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an explicit vector number.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6560
6561
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF if EFLAGS.OF is set.  Invalid in 64-bit mode.  The
 * conditional check is done by the C implementation; here we just pass the
 * fixed vector and interrupt kind.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT, enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
}
6576
6577
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return, deferred to a C implementation selected at
 * runtime by effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6587
6588
/**
 * @opcode 0xd0
 *
 * Group 2 shift/rotate of a byte operand by a constant count of 1
 * (rol/ror/rcl/rcr/shl/shr/sar Eb,1).  The reg field of ModR/M selects the
 * operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF may be left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read/write, shift in place, then commit data and flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6645
6646
6647
/**
 * @opcode 0xd1
 *
 * Group 2 shift/rotate of a word/dword/qword operand by a constant count of
 * 1 (rol/ror/rcl/rcr/shl/shr/sar Ev,1).  The reg field of ModR/M selects
 * the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF may be left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read/write, shift in place, then commit data and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6781
6782
/**
 * @opcode 0xd2
 *
 * Group 2 shift/rotate of a byte operand by the count in CL
 * (rol/ror/rcl/rcr/shl/shr/sar Eb,CL).  The reg field of ModR/M selects
 * the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF may be left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read/write, shift in place, then commit data and flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6841
6842
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate of a word/dword/qword operand by the count in CL
 * (rol/ror/rcl/rcr/shl/shr/sar Ev,CL).  The reg field of ModR/M selects
 * the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF may be left undefined by these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* The shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read/write, shift in place, then commit data and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6982
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply, with an arbitrary base.
 * Invalid in 64-bit mode; a zero immediate raises \#DE before execution.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* AAM divides by the immediate, so zero means divide error. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6996
6997
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before divide, with an arbitrary base.
 * Invalid in 64-bit mode.  Unlike AAM, a zero immediate is not a fault.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7009
7010
/**
 * @opcode 0xd6
 *
 * SALC (undocumented) - set AL from carry: AL = CF ? 0xff : 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7029
7030
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup translation: AL = [iEffSeg : rBX + zero-extended AL],
 * with the address width selected by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* 16-bit-address fetch variant (presumably for segment-relative
               16-bit wrap semantics - confirm against the MC definition). */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7079
7080
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * If either register is empty, the assembly worker is skipped and a stack
 * underflow is recorded against ST0 instead.
 *
 * @param bRm Mod R/M byte (low 3 bits select STn).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        /* Result (value + FSW) goes into ST0. */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7111
7112
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (the FPU status word) - no value is stored.
 *
 * If either register is empty, a stack underflow is recorded without naming a
 * destination register (UINT8_MAX).
 *
 * @param bRm Mod R/M byte (low 3 bits select STn).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7143
7144
7145/**
7146 * Common worker for FPU instructions working on ST0 and STn, only affecting
7147 * flags, and popping when done.
7148 *
7149 * @param bRm Mod R/M byte.
7150 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7151 */
7152FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7153{
7154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7155
7156 IEM_MC_BEGIN(3, 1);
7157 IEM_MC_LOCAL(uint16_t, u16Fsw);
7158 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7159 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7160 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7161
7162 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7163 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7164 IEM_MC_PREPARE_FPU_USAGE();
7165 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
7166 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7167 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7168 } IEM_MC_ELSE() {
7169 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
7170 } IEM_MC_ENDIF();
7171 IEM_MC_ADVANCE_RIP_AND_FINISH();
7172
7173 IEM_MC_END();
7174}
7175
7176
/** Opcode 0xd8 11/0. FADD ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
7183
7184
/** Opcode 0xd8 11/1. FMUL ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
7191
7192
/** Opcode 0xd8 11/2. FCOM ST(0),ST(i) - compare, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7199
7200
/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i) - compare, flags only, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7207
7208
/** Opcode 0xd8 11/4. FSUB ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7215
7216
/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i) - reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7223
7224
/** Opcode 0xd8 11/6. FDIV ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7231
7232
/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i) - reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7239
7240
7241/**
7242 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7243 * the result in ST0.
7244 *
7245 * @param bRm Mod R/M byte.
7246 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7247 */
7248FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7249{
7250 IEM_MC_BEGIN(3, 3);
7251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7252 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7253 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7254 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7255 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7256 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7257
7258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7260
7261 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7262 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7263 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7264
7265 IEM_MC_PREPARE_FPU_USAGE();
7266 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
7267 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7268 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7269 } IEM_MC_ELSE() {
7270 IEM_MC_FPU_STACK_UNDERFLOW(0);
7271 } IEM_MC_ENDIF();
7272 IEM_MC_ADVANCE_RIP_AND_FINISH();
7273
7274 IEM_MC_END();
7275}
7276
7277
/** Opcode 0xd8 !11/0. FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7284
7285
/** Opcode 0xd8 !11/1. FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7292
7293
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real - compare, flags only. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The *_WITH_MEM_OP variants also record FPUDP/FPUDS for the operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7325
7326
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real - compare, flags only, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7358
7359
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7366
7367
/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real - reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7374
7375
/** Opcode 0xd8 !11/6. FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7382
7383
/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real - reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7390
7391
7392/**
7393 * @opcode 0xd8
7394 */
7395FNIEMOP_DEF(iemOp_EscF0)
7396{
7397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7398 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7399
7400 if (IEM_IS_MODRM_REG_MODE(bRm))
7401 {
7402 switch (IEM_GET_MODRM_REG_8(bRm))
7403 {
7404 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7405 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7406 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7407 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7408 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7409 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7410 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7411 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7413 }
7414 }
7415 else
7416 {
7417 switch (IEM_GET_MODRM_REG_8(bRm))
7418 {
7419 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7420 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7421 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7422 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7423 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7424 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7425 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7426 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7428 }
7429 }
7430}
7431
7432
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - converts and pushes a 32-bit real onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7464
7465
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - stores ST(0) to memory as a 32-bit real. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so store faults are raised up front. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write the indefinite QNaN instead. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7499
7500
/** Opcode 0xd9 !11/3
 * FSTP m32real - stores ST(0) to memory as a 32-bit real and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write the indefinite QNaN instead. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7534
7535
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment; size depends on operand size.
 * RIP advancing is handled by the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
7552
7553
7554/** Opcode 0xd9 !11/5 */
7555FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7556{
7557 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7558 IEM_MC_BEGIN(1, 1);
7559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7560 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7563 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7564 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7565 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7566 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7567 IEM_MC_END();
7568}
7569
7570
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - stores the FPU environment (no-wait form; the
 * mnemonic identifier below says "fstenv" but this handler is the FNSTENV
 * encoding). RIP advancing is handled by the C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
7587
7588
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7605
7606
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing but update the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7623
7624
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - pushes a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Wrap the source value in a result with a zero FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7650
7651
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST(0) with ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i)'s old value goes into ST(0) via the result, ST(0)'s into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        /* Underflow handling is complex enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7680
7681
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copies ST(0) to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7727
7728
7729/**
7730 * Common worker for FPU instructions working on ST0 and replaces it with the
7731 * result, i.e. unary operators.
7732 *
7733 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7734 */
7735FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7736{
7737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7738
7739 IEM_MC_BEGIN(2, 1);
7740 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7741 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7742 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7743
7744 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7745 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7746 IEM_MC_PREPARE_FPU_USAGE();
7747 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
7748 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7749 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7750 } IEM_MC_ELSE() {
7751 IEM_MC_FPU_STACK_UNDERFLOW(0);
7752 } IEM_MC_ENDIF();
7753 IEM_MC_ADVANCE_RIP_AND_FINISH();
7754
7755 IEM_MC_END();
7756}
7757
7758
/** Opcode 0xd9 0xe0. FCHS - negates the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7765
7766
/** Opcode 0xd9 0xe1. FABS - clears the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7773
7774
/** Opcode 0xd9 0xe4. FTST - compares ST(0) with 0.0, flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7799
7800
/** Opcode 0xd9 0xe5. FXAM - classifies the value in ST(0) into C0-C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Unconditional register reference: FXAM also classifies empty registers,
       so no emptiness check is done here. */
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7822
7823
7824/**
7825 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7826 *
7827 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7828 */
7829FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7830{
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832
7833 IEM_MC_BEGIN(1, 1);
7834 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7835 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7836
7837 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7838 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7839 IEM_MC_PREPARE_FPU_USAGE();
7840 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
7841 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7842 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7843 } IEM_MC_ELSE() {
7844 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7845 } IEM_MC_ENDIF();
7846 IEM_MC_ADVANCE_RIP_AND_FINISH();
7847
7848 IEM_MC_END();
7849}
7850
7851
/** Opcode 0xd9 0xe8. FLD1 - pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7858
7859
/** Opcode 0xd9 0xe9. FLDL2T - pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7866
7867
/** Opcode 0xd9 0xea. FLDL2E - pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7874
/** Opcode 0xd9 0xeb. FLDPI - pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7881
7882
/** Opcode 0xd9 0xec. FLDLG2 - pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7889
/** Opcode 0xd9 0xed. FLDLN2 - pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7896
7897
/** Opcode 0xd9 0xee. FLDZ - pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7904
7905
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7919
7920
7921/**
7922 * Common worker for FPU instructions working on STn and ST0, storing the result
7923 * in STn, and popping the stack unless IE, DE or ZE was raised.
7924 *
7925 * @param bRm Mod R/M byte.
7926 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7927 */
7928FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7929{
7930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7931
7932 IEM_MC_BEGIN(3, 1);
7933 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7934 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7935 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7936 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7937
7938 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7939 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7940
7941 IEM_MC_PREPARE_FPU_USAGE();
7942 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
7943 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7944 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
7945 } IEM_MC_ELSE() {
7946 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
7947 } IEM_MC_ENDIF();
7948 IEM_MC_ADVANCE_RIP_AND_FINISH();
7949
7950 IEM_MC_END();
7951}
7952
7953
/** Opcode 0xd9 0xf1. FYL2X - ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7960
7961
7962/**
7963 * Common worker for FPU instructions working on ST0 and having two outputs, one
7964 * replacing ST0 and one pushed onto the stack.
7965 *
7966 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7967 */
7968FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7969{
7970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7971
7972 IEM_MC_BEGIN(2, 1);
7973 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7974 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7975 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7976
7977 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7978 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7979 IEM_MC_PREPARE_FPU_USAGE();
7980 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
7981 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7982 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7983 } IEM_MC_ELSE() {
7984 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7985 } IEM_MC_ENDIF();
7986 IEM_MC_ADVANCE_RIP_AND_FINISH();
7987
7988 IEM_MC_END();
7989}
7990
7991
/** Opcode 0xd9 0xf2. FPTAN - replaces ST(0) with its tangent, then pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7998
7999
/** Opcode 0xd9 0xf3. FPATAN - ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
8006
8007
/** Opcode 0xd9 0xf4. FXTRACT - splits ST(0) into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
8014
8015
/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8022
8023
/** Opcode 0xd9 0xf6. FDECSTP - decrements the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8045
8046
/** Opcode 0xd9 0xf7. FINCSTP - increments the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8068
8069
/** Opcode 0xd9 0xf8. FPREM - partial remainder of ST(0)/ST(1) (truncating). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8076
8077
/** Opcode 0xd9 0xf9. FYL2XP1 - ST(1) = ST(1) * log2(ST(0)+1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
8084
8085
/** Opcode 0xd9 0xfa. FSQRT - replaces ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
8092
8093
/** Opcode 0xd9 0xfb. FSINCOS - replaces ST(0) with sin, pushes cos. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
8100
8101
/** Opcode 0xd9 0xfc. FRNDINT - rounds ST(0) to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
8108
8109
8110/** Opcode 0xd9 0xfd.
 * FSCALE - scales st0 by st1, result into st0 (worker: st0_stN, i=1). */
8111FNIEMOP_DEF(iemOp_fscale)
8112{
8113 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
8114 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
8115}
8116
8117
8118/** Opcode 0xd9 0xfe.
 * FSIN - sine of st0, in place. */
8119FNIEMOP_DEF(iemOp_fsin)
8120{
8121 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
8122 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
8123}
8124
8125
8126/** Opcode 0xd9 0xff.
 * FCOS - cosine of st0, in place. */
8127FNIEMOP_DEF(iemOp_fcos)
8128{
8129 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
8130 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
8131}
8132
8133
8134/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 instructions whose ModR/M byte is in the register
 * form range 0xe0..0xff; indexed by (bRm - 0xe0). */
8135IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
8136{
8137 /* 0xe0 */ iemOp_fchs,
8138 /* 0xe1 */ iemOp_fabs,
8139 /* 0xe2 */ iemOp_Invalid,
8140 /* 0xe3 */ iemOp_Invalid,
8141 /* 0xe4 */ iemOp_ftst,
8142 /* 0xe5 */ iemOp_fxam,
8143 /* 0xe6 */ iemOp_Invalid,
8144 /* 0xe7 */ iemOp_Invalid,
8145 /* 0xe8 */ iemOp_fld1,
8146 /* 0xe9 */ iemOp_fldl2t,
8147 /* 0xea */ iemOp_fldl2e,
8148 /* 0xeb */ iemOp_fldpi,
8149 /* 0xec */ iemOp_fldlg2,
8150 /* 0xed */ iemOp_fldln2,
8151 /* 0xee */ iemOp_fldz,
8152 /* 0xef */ iemOp_Invalid,
8153 /* 0xf0 */ iemOp_f2xm1,
8154 /* 0xf1 */ iemOp_fyl2x,
8155 /* 0xf2 */ iemOp_fptan,
8156 /* 0xf3 */ iemOp_fpatan,
8157 /* 0xf4 */ iemOp_fxtract,
8158 /* 0xf5 */ iemOp_fprem1,
8159 /* 0xf6 */ iemOp_fdecstp,
8160 /* 0xf7 */ iemOp_fincstp,
8161 /* 0xf8 */ iemOp_fprem,
8162 /* 0xf9 */ iemOp_fyl2xp1,
8163 /* 0xfa */ iemOp_fsqrt,
8164 /* 0xfb */ iemOp_fsincos,
8165 /* 0xfc */ iemOp_frndint,
8166 /* 0xfd */ iemOp_fscale,
8167 /* 0xfe */ iemOp_fsin,
8168 /* 0xff */ iemOp_fcos
8169};
8170
8171
8172/**
8173 * @opcode 0xd9
 *
 * Decoder for the 0xd9 FPU escape byte: dispatches on the ModR/M reg field,
 * with separate register-form and memory-form tables.
8174 */
8175FNIEMOP_DEF(iemOp_EscF1)
8176{
8177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8178 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7); /* record FOP: ModR/M + low 3 bits of escape byte */
8179
8180 if (IEM_IS_MODRM_REG_MODE(bRm)) /* register form */
8181 {
8182 switch (IEM_GET_MODRM_REG_8(bRm))
8183 {
8184 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8185 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8186 case 2:
8187 if (bRm == 0xd0)
8188 return FNIEMOP_CALL(iemOp_fnop);
8189 return IEMOP_RAISE_INVALID_OPCODE();
8190 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8191 case 4:
8192 case 5:
8193 case 6:
8194 case 7:
 /* reg 4..7 covers bRm 0xe0..0xff; dispatch via the table. */
8195 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8196 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8198 }
8199 }
8200 else /* memory form */
8201 {
8202 switch (IEM_GET_MODRM_REG_8(bRm))
8203 {
8204 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8205 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8206 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8207 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8208 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8209 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8210 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8211 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8213 }
8214 }
8215}
8216
8217
8218/** Opcode 0xda 11/0.
 * FCMOVB - copies st(i) to st0 when CF is set (below); both registers must be
 * non-empty, otherwise a stack underflow is signalled on st0. */
8219FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
8220{
8221 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
8222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8223
8224 IEM_MC_BEGIN(0, 1);
8225 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8226
8227 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8228 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8229
8230 IEM_MC_PREPARE_FPU_USAGE();
8231 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8232 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { /* condition: below */
8233 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8234 } IEM_MC_ENDIF();
8235 IEM_MC_UPDATE_FPU_OPCODE_IP();
8236 } IEM_MC_ELSE() {
8237 IEM_MC_FPU_STACK_UNDERFLOW(0);
8238 } IEM_MC_ENDIF();
8239 IEM_MC_ADVANCE_RIP_AND_FINISH();
8240
8241 IEM_MC_END();
8242}
8243
8244
8245/** Opcode 0xda 11/1.
 * FCMOVE - copies st(i) to st0 when ZF is set (equal); stack underflow on st0
 * if either register is empty. */
8246FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
8247{
8248 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
8249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8250
8251 IEM_MC_BEGIN(0, 1);
8252 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8253
8254 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8255 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8256
8257 IEM_MC_PREPARE_FPU_USAGE();
8258 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8259 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { /* condition: equal */
8260 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8261 } IEM_MC_ENDIF();
8262 IEM_MC_UPDATE_FPU_OPCODE_IP();
8263 } IEM_MC_ELSE() {
8264 IEM_MC_FPU_STACK_UNDERFLOW(0);
8265 } IEM_MC_ENDIF();
8266 IEM_MC_ADVANCE_RIP_AND_FINISH();
8267
8268 IEM_MC_END();
8269}
8270
8271
8272/** Opcode 0xda 11/2.
 * FCMOVBE - copies st(i) to st0 when CF or ZF is set (below-or-equal);
 * stack underflow on st0 if either register is empty. */
8273FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
8274{
8275 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
8276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8277
8278 IEM_MC_BEGIN(0, 1);
8279 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8280
8281 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8282 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8283
8284 IEM_MC_PREPARE_FPU_USAGE();
8285 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8286 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { /* condition: below or equal */
8287 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8288 } IEM_MC_ENDIF();
8289 IEM_MC_UPDATE_FPU_OPCODE_IP();
8290 } IEM_MC_ELSE() {
8291 IEM_MC_FPU_STACK_UNDERFLOW(0);
8292 } IEM_MC_ENDIF();
8293 IEM_MC_ADVANCE_RIP_AND_FINISH();
8294
8295 IEM_MC_END();
8296}
8297
8298
8299/** Opcode 0xda 11/3.
 * FCMOVU - copies st(i) to st0 when PF is set (unordered); stack underflow
 * on st0 if either register is empty. */
8300FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
8301{
8302 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
8303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8304
8305 IEM_MC_BEGIN(0, 1);
8306 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8307
8308 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8309 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8310
8311 IEM_MC_PREPARE_FPU_USAGE();
8312 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8313 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { /* condition: unordered */
8314 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8315 } IEM_MC_ENDIF();
8316 IEM_MC_UPDATE_FPU_OPCODE_IP();
8317 } IEM_MC_ELSE() {
8318 IEM_MC_FPU_STACK_UNDERFLOW(0);
8319 } IEM_MC_ENDIF();
8320 IEM_MC_ADVANCE_RIP_AND_FINISH();
8321
8322 IEM_MC_END();
8323}
8324
8325
8326/**
8327 * Common worker for FPU instructions working on ST0 and ST1, only affecting
8328 * flags, and popping twice when done.
8329 *
 * Used by compare-style instructions (e.g. FUCOMPP) that leave only FSW
 * changes behind. The stack is popped twice on both success and underflow.
 *
8330 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8331 */
8332FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8333{
8334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8335
8336 IEM_MC_BEGIN(3, 1);
8337 IEM_MC_LOCAL(uint16_t, u16Fsw);
8338 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8339 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8340 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8341
8342 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8343 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8344
8345 IEM_MC_PREPARE_FPU_USAGE();
8346 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) { /* both st0 and st1 valid */
8347 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8348 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8349 } IEM_MC_ELSE() {
8350 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(); /* underflow still pops both */
8351 } IEM_MC_ENDIF();
8352 IEM_MC_ADVANCE_RIP_AND_FINISH();
8353
8354 IEM_MC_END();
8355}
8356
8357
8358/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare st0 with st1, then pop both. */
8359FNIEMOP_DEF(iemOp_fucompp)
8360{
8361 IEMOP_MNEMONIC(fucompp, "fucompp");
8362 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
8363}
8364
8365
8366/**
8367 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8368 * the result in ST0.
8369 *
 * The 32-bit integer is fetched from memory before the FPU state is touched,
 * so a memory fault is raised without any FPU side effects.
 *
8370 * @param bRm Mod R/M byte.
8371 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8372 */
8373FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8374{
8375 IEM_MC_BEGIN(3, 3);
8376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8377 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8378 IEM_MC_LOCAL(int32_t, i32Val2);
8379 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8380 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8381 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8382
8383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8385
8386 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8387 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8388 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8389
8390 IEM_MC_PREPARE_FPU_USAGE();
8391 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8392 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8393 IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result lands in st0 */
8394 } IEM_MC_ELSE() {
8395 IEM_MC_FPU_STACK_UNDERFLOW(0);
8396 } IEM_MC_ENDIF();
8397 IEM_MC_ADVANCE_RIP_AND_FINISH();
8398
8399 IEM_MC_END();
8400}
8401
8402
8403/** Opcode 0xda !11/0.
 * FIADD - st0 += m32i. */
8404FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
8405{
8406 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
8407 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
8408}
8409
8410
8411/** Opcode 0xda !11/1.
 * FIMUL - st0 *= m32i. */
8412FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
8413{
8414 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
8415 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
8416}
8417
8418
8419/** Opcode 0xda !11/2.
 * FICOM - compare st0 with m32i; only FSW is updated, nothing is stored. */
8420FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
8421{
8422 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
8423
8424 IEM_MC_BEGIN(3, 3);
8425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8426 IEM_MC_LOCAL(uint16_t, u16Fsw);
8427 IEM_MC_LOCAL(int32_t, i32Val2);
8428 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8429 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8430 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8431
8432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8434
8435 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8436 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8437 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* memory fault before any FPU change */
8438
8439 IEM_MC_PREPARE_FPU_USAGE();
8440 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8441 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
8442 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8443 } IEM_MC_ELSE() {
8444 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* no dest register */
8445 } IEM_MC_ENDIF();
8446 IEM_MC_ADVANCE_RIP_AND_FINISH();
8447
8448 IEM_MC_END();
8449}
8450
8451
8452/** Opcode 0xda !11/3.
 * FICOMP - like FICOM (compare st0 with m32i, FSW only) but pops st0 after. */
8453FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
8454{
8455 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
8456
8457 IEM_MC_BEGIN(3, 3);
8458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8459 IEM_MC_LOCAL(uint16_t, u16Fsw);
8460 IEM_MC_LOCAL(int32_t, i32Val2);
8461 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8462 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8463 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8464
8465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8467
8468 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8469 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8470 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* memory fault before any FPU change */
8471
8472 IEM_MC_PREPARE_FPU_USAGE();
8473 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8474 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
8475 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8476 } IEM_MC_ELSE() {
8477 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* pop even on underflow */
8478 } IEM_MC_ENDIF();
8479 IEM_MC_ADVANCE_RIP_AND_FINISH();
8480
8481 IEM_MC_END();
8482}
8483
8484
8485/** Opcode 0xda !11/4.
 * FISUB - st0 -= m32i. */
8486FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
8487{
8488 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
8489 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
8490}
8491
8492
8493/** Opcode 0xda !11/5.
 * FISUBR - reverse subtract: st0 = m32i - st0. */
8494FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
8495{
8496 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
8497 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
8498}
8499
8500
8501/** Opcode 0xda !11/6.
 * FIDIV - st0 /= m32i. */
8502FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
8503{
8504 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
8505 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
8506}
8507
8508
8509/** Opcode 0xda !11/7.
 * FIDIVR - reverse divide: st0 = m32i / st0. */
8510FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
8511{
8512 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
8513 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
8514}
8515
8516
8517/**
8518 * @opcode 0xda
 *
 * Decoder for the 0xda FPU escape byte: FCMOVcc/FUCOMPP in register form,
 * 32-bit integer arithmetic (FIADD..FIDIVR) in memory form.
8519 */
8520FNIEMOP_DEF(iemOp_EscF2)
8521{
8522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8523 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7); /* record FOP */
8524 if (IEM_IS_MODRM_REG_MODE(bRm)) /* register form */
8525 {
8526 switch (IEM_GET_MODRM_REG_8(bRm))
8527 {
8528 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8529 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8530 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8531 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8532 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8533 case 5:
8534 if (bRm == 0xe9) /* only FUCOMPP is valid in /5 */
8535 return FNIEMOP_CALL(iemOp_fucompp);
8536 return IEMOP_RAISE_INVALID_OPCODE();
8537 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8538 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8540 }
8541 }
8542 else /* memory form */
8543 {
8544 switch (IEM_GET_MODRM_REG_8(bRm))
8545 {
8546 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8547 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8548 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8549 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8550 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8551 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8552 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8553 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8555 }
8556 }
8557}
8558
8559
8560/** Opcode 0xdb !11/0.
 * FILD - loads a 32-bit signed integer from memory and pushes it onto the
 * FPU stack; pushes stack-overflow handling if ST(7) is occupied. */
8561FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
8562{
8563 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
8564
8565 IEM_MC_BEGIN(2, 3);
8566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8567 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8568 IEM_MC_LOCAL(int32_t, i32Val);
8569 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8570 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
8571
8572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8574
8575 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8576 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8577 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8578
8579 IEM_MC_PREPARE_FPU_USAGE();
8580 IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* ST(7) empty means there is room to push */
8581 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
8582 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8583 } IEM_MC_ELSE() {
8584 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8585 } IEM_MC_ENDIF();
8586 IEM_MC_ADVANCE_RIP_AND_FINISH();
8587
8588 IEM_MC_END();
8589}
8590
8591
8592/** Opcode 0xdb !11/1.
 * FISTTP - store st0 to m32i with truncation, then pop. On stack underflow,
 * the integer-indefinite value is stored when the IM exception is masked. */
8593FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
8594{
8595 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
8596 IEM_MC_BEGIN(3, 2);
8597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8598 IEM_MC_LOCAL(uint16_t, u16Fsw);
8599 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8600 IEM_MC_ARG(int32_t *, pi32Dst, 1);
8601 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8602
8603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8605 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8606 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8607
8608 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/); /* map dest for writing first */
8609 IEM_MC_PREPARE_FPU_USAGE();
8610 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8611 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8612 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8613 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8614 } IEM_MC_ELSE() {
8615 IEM_MC_IF_FCW_IM() { /* invalid-op masked: store integer indefinite */
8616 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8617 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8618 } IEM_MC_ENDIF();
8619 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8620 } IEM_MC_ENDIF();
8621 IEM_MC_ADVANCE_RIP_AND_FINISH();
8622
8623 IEM_MC_END();
8624}
8625
8626
8627/** Opcode 0xdb !11/2.
 * FIST - store st0 to m32i (rounded per FCW.RC) without popping. On stack
 * underflow, integer indefinite is stored when IM is masked. */
8628FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
8629{
8630 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
8631 IEM_MC_BEGIN(3, 2);
8632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8633 IEM_MC_LOCAL(uint16_t, u16Fsw);
8634 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8635 IEM_MC_ARG(int32_t *, pi32Dst, 1);
8636 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8637
8638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8640 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8641 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8642
8643 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8644 IEM_MC_PREPARE_FPU_USAGE();
8645 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8646 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8647 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8648 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* no pop (unlike FISTP) */
8649 } IEM_MC_ELSE() {
8650 IEM_MC_IF_FCW_IM() { /* invalid-op masked: store integer indefinite */
8651 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8652 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8653 } IEM_MC_ENDIF();
8654 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8655 } IEM_MC_ENDIF();
8656 IEM_MC_ADVANCE_RIP_AND_FINISH();
8657
8658 IEM_MC_END();
8659}
8660
8661
8662/** Opcode 0xdb !11/3.
 * FISTP - store st0 to m32i (rounded per FCW.RC), then pop. On stack
 * underflow, integer indefinite is stored when IM is masked. */
8663FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
8664{
8665 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
8666 IEM_MC_BEGIN(3, 2);
8667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8668 IEM_MC_LOCAL(uint16_t, u16Fsw);
8669 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8670 IEM_MC_ARG(int32_t *, pi32Dst, 1);
8671 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8672
8673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8675 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8676 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8677
8678 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8679 IEM_MC_PREPARE_FPU_USAGE();
8680 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8681 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8682 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8683 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8684 } IEM_MC_ELSE() {
8685 IEM_MC_IF_FCW_IM() { /* invalid-op masked: store integer indefinite */
8686 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8687 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8688 } IEM_MC_ENDIF();
8689 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* pop even on underflow */
8690 } IEM_MC_ENDIF();
8691 IEM_MC_ADVANCE_RIP_AND_FINISH();
8692
8693 IEM_MC_END();
8694}
8695
8696
8697/** Opcode 0xdb !11/5.
 * FLD - loads an 80-bit real from memory and pushes it onto the FPU stack. */
8698FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
8699{
8700 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
8701
8702 IEM_MC_BEGIN(2, 3);
8703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8704 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8705 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
8706 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8707 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
8708
8709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8711
8712 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8713 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8714 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8715
8716 IEM_MC_PREPARE_FPU_USAGE();
8717 IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* room to push? */
8718 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
8719 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8720 } IEM_MC_ELSE() {
8721 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8722 } IEM_MC_ENDIF();
8723 IEM_MC_ADVANCE_RIP_AND_FINISH();
8724
8725 IEM_MC_END();
8726}
8727
8728
8729/** Opcode 0xdb !11/7.
 * FSTP - store st0 to an 80-bit real in memory, then pop. On stack underflow,
 * a negative QNaN is stored when IM is masked. */
8730FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
8731{
8732 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
8733 IEM_MC_BEGIN(3, 2);
8734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8735 IEM_MC_LOCAL(uint16_t, u16Fsw);
8736 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8737 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
8738 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8739
8740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8742 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8743 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8744
 /* 10-byte store: explicit size and alignment mask (7) via the _EX mapper. */
8745 IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
8746 IEM_MC_PREPARE_FPU_USAGE();
8747 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8748 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
8749 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
8750 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8751 } IEM_MC_ELSE() {
8752 IEM_MC_IF_FCW_IM() { /* invalid-op masked: store QNaN indefinite */
8753 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
8754 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
8755 } IEM_MC_ENDIF();
8756 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8757 } IEM_MC_ENDIF();
8758 IEM_MC_ADVANCE_RIP_AND_FINISH();
8759
8760 IEM_MC_END();
8761}
8762
8763
8764/** Opcode 0xdb 11/0.
 * FCMOVNB - copies st(i) to st0 when CF is clear (not below); stack underflow
 * on st0 if either register is empty. */
8765FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
8766{
8767 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
8768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8769
8770 IEM_MC_BEGIN(0, 1);
8771 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8772
8773 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8774 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8775
8776 IEM_MC_PREPARE_FPU_USAGE();
8777 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8778 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) { /* condition: not below */
8779 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8780 } IEM_MC_ENDIF();
8781 IEM_MC_UPDATE_FPU_OPCODE_IP();
8782 } IEM_MC_ELSE() {
8783 IEM_MC_FPU_STACK_UNDERFLOW(0);
8784 } IEM_MC_ENDIF();
8785 IEM_MC_ADVANCE_RIP_AND_FINISH();
8786
8787 IEM_MC_END();
8788}
8789
8790
8791/** Opcode 0xdb 11/1.
 * FCMOVNE - copies st(i) to st0 when ZF is clear (not equal); stack underflow
 * on st0 if either register is empty. */
8792FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
8793{
8794 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
8795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8796
8797 IEM_MC_BEGIN(0, 1);
8798 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8799
8800 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8801 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8802
8803 IEM_MC_PREPARE_FPU_USAGE();
8804 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8805 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { /* condition: not equal */
8806 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8807 } IEM_MC_ENDIF();
8808 IEM_MC_UPDATE_FPU_OPCODE_IP();
8809 } IEM_MC_ELSE() {
8810 IEM_MC_FPU_STACK_UNDERFLOW(0);
8811 } IEM_MC_ENDIF();
8812 IEM_MC_ADVANCE_RIP_AND_FINISH();
8813
8814 IEM_MC_END();
8815}
8816
8817
8818/** Opcode 0xdb 11/2.
 * FCMOVNBE - copies st(i) to st0 when both CF and ZF are clear (not
 * below-or-equal); stack underflow on st0 if either register is empty. */
8819FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
8820{
8821 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
8822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8823
8824 IEM_MC_BEGIN(0, 1);
8825 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8826
8827 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8828 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8829
8830 IEM_MC_PREPARE_FPU_USAGE();
8831 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8832 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { /* condition: not below or equal */
8833 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8834 } IEM_MC_ENDIF();
8835 IEM_MC_UPDATE_FPU_OPCODE_IP();
8836 } IEM_MC_ELSE() {
8837 IEM_MC_FPU_STACK_UNDERFLOW(0);
8838 } IEM_MC_ENDIF();
8839 IEM_MC_ADVANCE_RIP_AND_FINISH();
8840
8841 IEM_MC_END();
8842}
8843
8844
8845/** Opcode 0xdb 11/3.
 * FCMOVNU - copies st(i) to st0 when PF is clear (not unordered); stack
 * underflow on st0 if either register is empty. */
8846FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
8847{
8848 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
8849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8850
8851 IEM_MC_BEGIN(0, 1);
8852 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8853
8854 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8855 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8856
8857 IEM_MC_PREPARE_FPU_USAGE();
8858 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
8859 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) { /* condition: not unordered */
8860 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8861 } IEM_MC_ENDIF();
8862 IEM_MC_UPDATE_FPU_OPCODE_IP();
8863 } IEM_MC_ELSE() {
8864 IEM_MC_FPU_STACK_UNDERFLOW(0);
8865 } IEM_MC_ENDIF();
8866 IEM_MC_ADVANCE_RIP_AND_FINISH();
8867
8868 IEM_MC_END();
8869}
8870
8871
8872/** Opcode 0xdb 0xe0.
 * FNENI - 8087 enable-interrupts; treated as a NOP here apart from the
 * device-not-available check. */
8873FNIEMOP_DEF(iemOp_fneni)
8874{
8875 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
8876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8877 IEM_MC_BEGIN(0,0);
8878 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8879 IEM_MC_ADVANCE_RIP_AND_FINISH();
8880 IEM_MC_END();
8881}
8882
8883
8884/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 disable-interrupts; treated as a NOP here apart from the
 * device-not-available check. */
8885FNIEMOP_DEF(iemOp_fndisi)
8886{
8887 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
8888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8889 IEM_MC_BEGIN(0,0);
8890 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8891 IEM_MC_ADVANCE_RIP_AND_FINISH();
8892 IEM_MC_END();
8893}
8894
8895
8896/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception flags in FSW (no-wait form). */
8897FNIEMOP_DEF(iemOp_fnclex)
8898{
8899 IEMOP_MNEMONIC(fnclex, "fnclex");
8900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8901
8902 IEM_MC_BEGIN(0,0);
8903 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8904 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8905 IEM_MC_CLEAR_FSW_EX(); /* drop the exception bits */
8906 IEM_MC_ADVANCE_RIP_AND_FINISH();
8907 IEM_MC_END();
8908}
8909
8910
8911/** Opcode 0xdb 0xe3.
 * FNINIT - re-initializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false (the no-wait form skips the pending-exception check). */
8912FNIEMOP_DEF(iemOp_fninit)
8913{
8914 IEMOP_MNEMONIC(fninit, "fninit");
8915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8916 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
8917}
8918
8919
8920/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287-only; treated as a NOP here apart from the
 * device-not-available check. */
8921FNIEMOP_DEF(iemOp_fnsetpm)
8922{
8923 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
8924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8925 IEM_MC_BEGIN(0,0);
8926 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8927 IEM_MC_ADVANCE_RIP_AND_FINISH();
8928 IEM_MC_END();
8929}
8930
8931
8932/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL-only; raises \#UD here (the NOP variant is compiled out). */
8933FNIEMOP_DEF(iemOp_frstpm)
8934{
8935 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
8936#if 0 /* #UDs on newer CPUs */
8937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8938 IEM_MC_BEGIN(0,0);
8939 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8940 IEM_MC_ADVANCE_RIP_AND_FINISH();
8941 IEM_MC_END();
8942 return VINF_SUCCESS;
8943#else
8944 return IEMOP_RAISE_INVALID_OPCODE();
8945#endif
8946}
8947
8948
8949/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare st0 with st(i), setting EFLAGS; no pop
 * (fPop=false). Deferred to the common fcomi/fucomi C implementation. */
8950FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
8951{
8952 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
8953 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
8954}
8955
8956
8957/** Opcode 0xdb 11/6.
 * FCOMI - compare st0 with st(i), setting EFLAGS; no pop (fPop=false).
 * Deferred to the common fcomi/fucomi C implementation. */
8958FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
8959{
8960 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
8961 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
8962}
8963
8964
8965/**
8966 * @opcode 0xdb
 *
 * Decoder for the 0xdb FPU escape byte: FCMOVNcc, the no-wait control
 * instructions (FNCLEX/FNINIT/...) and FUCOMI/FCOMI in register form;
 * 32-bit integer and 80-bit real load/store in memory form.
8967 */
8968FNIEMOP_DEF(iemOp_EscF3)
8969{
8970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8971 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7); /* record FOP */
8972 if (IEM_IS_MODRM_REG_MODE(bRm)) /* register form */
8973 {
8974 switch (IEM_GET_MODRM_REG_8(bRm))
8975 {
8976 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8977 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8978 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8979 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8980 case 4:
 /* /4: each of 0xe0..0xe7 is an individual control instruction. */
8981 switch (bRm)
8982 {
8983 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8984 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8985 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8986 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8987 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8988 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8989 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8990 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8992 }
8993 break;
8994 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8995 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8996 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8998 }
8999 }
9000 else /* memory form */
9001 {
9002 switch (IEM_GET_MODRM_REG_8(bRm))
9003 {
9004 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9005 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9006 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9007 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9008 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9009 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9010 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9011 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9013 }
9014 }
9015}
9016
9017
9018/**
9019 * Common worker for FPU instructions working on STn and ST0, and storing the
9020 * result in STn unless IE, DE or ZE was raised.
9021 *
 * ST(i) is the destination here (0xdc-style encodings): the worker references
 * ST(bRm.rm) as operand 1 and ST0 as operand 2, storing back to ST(bRm.rm).
 *
9022 * @param bRm Mod R/M byte.
9023 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9024 */
9025FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9026{
9027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9028
9029 IEM_MC_BEGIN(3, 1);
9030 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9031 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9032 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9033 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9034
9035 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9036 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9037
9038 IEM_MC_PREPARE_FPU_USAGE();
9039 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9040 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9041 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm)); /* dest is st(i) */
9042 } IEM_MC_ELSE() {
9043 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
9044 } IEM_MC_ENDIF();
9045 IEM_MC_ADVANCE_RIP_AND_FINISH();
9046
9047 IEM_MC_END();
9048}
9049
9050
9051/** Opcode 0xdc 11/0.
 * FADD - st(i) += st0. */
9052FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
9053{
9054 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
9055 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
9056}
9057
9058
9059/** Opcode 0xdc 11/1.
 * FMUL - st(i) *= st0. */
9060FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
9061{
9062 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
9063 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
9064}
9065
9066
9067/** Opcode 0xdc 11/4.
 * FSUBR - reverse subtract with st(i) as destination. */
9068FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
9069{
9070 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
9071 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
9072}
9073
9074
9075/** Opcode 0xdc 11/5.
 * FSUB - subtract with st(i) as destination. */
9076FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
9077{
9078 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
9079 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
9080}
9081
9082
9083/** Opcode 0xdc 11/6.
 * FDIVR - reverse divide with st(i) as destination. */
9084FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
9085{
9086 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
9087 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
9088}
9089
9090
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9097
9098
9099/**
9100 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9101 * memory operand, and storing the result in ST0.
9102 *
9103 * @param bRm Mod R/M byte.
9104 * @param pfnImpl Pointer to the instruction implementation (assembly).
9105 */
9106FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9107{
9108 IEM_MC_BEGIN(3, 3);
9109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9110 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9111 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9112 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9113 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9114 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9115
9116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9118 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9119 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9120
9121 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9122 IEM_MC_PREPARE_FPU_USAGE();
9123 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
9124 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9125 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9126 } IEM_MC_ELSE() {
9127 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9128 } IEM_MC_ENDIF();
9129 IEM_MC_ADVANCE_RIP_AND_FINISH();
9130
9131 IEM_MC_END();
9132}
9133
9134
/** Opcode 0xdc !11/0.
 * FADD ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9141
9142
/** Opcode 0xdc !11/1.
 * FMUL ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9149
9150
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real: compares ST(0) with a 64-bit FP memory operand,
 * updating only FSW (no data result, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register to mark, only FSW/operand info. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9182
9183
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real: same as FCOM m64r but pops the register stack
 * afterwards (note the *_THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9215
9216
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9223
9224
/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64real (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9231
9232
/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9239
9240
/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64real (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9247
9248
9249/**
9250 * @opcode 0xdc
9251 */
9252FNIEMOP_DEF(iemOp_EscF4)
9253{
9254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9255 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9256 if (IEM_IS_MODRM_REG_MODE(bRm))
9257 {
9258 switch (IEM_GET_MODRM_REG_8(bRm))
9259 {
9260 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9261 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9262 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9263 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9264 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9265 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9266 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9267 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9269 }
9270 }
9271 else
9272 {
9273 switch (IEM_GET_MODRM_REG_8(bRm))
9274 {
9275 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9276 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9277 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9278 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9279 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9280 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9281 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9282 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9284 }
9285 }
9286}
9287
9288
/** Opcode 0xdd !11/0.
 * FLD m64real: converts a 64-bit FP memory operand to 80-bit and pushes it
 * onto the FPU stack (requires the register below TOP, ST(7), to be empty).
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* Push target occupied -> stack overflow (C1 set, invalid op). */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9319
9320
/** Opcode 0xdd !11/1.
 * FISTTP m64int (SSE3): stores ST(0) as a 64-bit integer using truncation
 * (chop) rounding, then pops.  On masked invalid-op (FCW.IM set) with an
 * empty ST(0), the integer indefinite value is stored instead.
 * NOTE(review): the original comment said "!11/0", but this is dispatched
 * from case 1 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9354
9355
/** Opcode 0xdd !11/2.
 * FST m64real: stores ST(0) rounded to 64-bit FP, no pop.  On masked
 * invalid-op with an empty ST(0) a negative QNaN is stored.
 * NOTE(review): the original comment said "!11/0", but this is dispatched
 * from case 2 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9389
9390
9391
9392
/** Opcode 0xdd !11/3.
 * FSTP m64real: like FST m64real but pops the stack afterwards.
 * NOTE(review): the original comment said "!11/0", but this is dispatched
 * from case 3 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9426
9427
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restores the complete FPU state image from memory;
 * deferred to a C implementation (iemCImpl_frstor).
 * NOTE(review): the original comment said "!11/0", but this is dispatched
 * from case 4 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9444
9445
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: saves the complete FPU state image to memory and then
 * reinitializes the FPU; deferred to a C implementation (iemCImpl_fnsave).
 * NOTE(review): the original comment said "!11/0", but this is dispatched
 * from case 6 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9462
/** Opcode 0xdd !11/7.
 * FNSTSW m16: stores the FPU status word to memory.  No-wait form, hence no
 * IEM_MC_MAYBE_RAISE_FPU_XCPT() check before the store.
 * NOTE(review): the original comment said "!11/0", but this is dispatched
 * from case 7 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
9486
9487
/** Opcode 0xdd 11/0.
 * FFREE ST(i): marks the given stack register as empty (tags it free)
 * without changing TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9508
9509
/** Opcode 0xdd 11/2.
 * FST ST(i): copies ST(0) into ST(i) (no pop).  Implemented by wrapping the
 * ST(0) value in an FPU result with a zero FSW contribution.
 * NOTE(review): the original comment said "11/1", but this is dispatched
 * from case 2 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9533
9534
/** Opcode 0xdd 11/4.
 * FUCOM ST(i): unordered compare of ST(0) with ST(i), FSW only, no pop.
 * NOTE(review): the original comment said "11/3", but this is dispatched
 * from case 4 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9541
9542
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), FSW only, then pop.
 * NOTE(review): the original comment said "11/4", but this is dispatched
 * from case 5 in iemOp_EscF5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9549
9550
9551/**
9552 * @opcode 0xdd
9553 */
9554FNIEMOP_DEF(iemOp_EscF5)
9555{
9556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9557 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9558 if (IEM_IS_MODRM_REG_MODE(bRm))
9559 {
9560 switch (IEM_GET_MODRM_REG_8(bRm))
9561 {
9562 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9563 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9564 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9565 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9566 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9567 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9568 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9569 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9571 }
9572 }
9573 else
9574 {
9575 switch (IEM_GET_MODRM_REG_8(bRm))
9576 {
9577 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9578 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9579 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9580 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9581 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9582 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9583 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9584 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9586 }
9587 }
9588}
9589
9590
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): add, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9597
9598
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): multiply, store in ST(i), then pop.
 * NOTE(review): the original comment said "11/0", but this is dispatched
 * from case 1 in iemOp_EscF6. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9605
9606
/** Opcode 0xde 0xd9.
 * FCOMPP: compares ST(0) with ST(1), then pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9613
9614
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reversed subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9621
9622
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9629
9630
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reversed divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9637
9638
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9645
9646
9647/**
9648 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9649 * the result in ST0.
9650 *
9651 * @param bRm Mod R/M byte.
9652 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9653 */
9654FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9655{
9656 IEM_MC_BEGIN(3, 3);
9657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9658 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9659 IEM_MC_LOCAL(int16_t, i16Val2);
9660 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9661 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9662 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9663
9664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9666
9667 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9668 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9669 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9670
9671 IEM_MC_PREPARE_FPU_USAGE();
9672 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9673 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9674 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9675 } IEM_MC_ELSE() {
9676 IEM_MC_FPU_STACK_UNDERFLOW(0);
9677 } IEM_MC_ENDIF();
9678 IEM_MC_ADVANCE_RIP_AND_FINISH();
9679
9680 IEM_MC_END();
9681}
9682
9683
/** Opcode 0xde !11/0.
 * FIADD m16int: adds a 16-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9690
9691
/** Opcode 0xde !11/1.
 * FIMUL m16int: multiplies ST(0) by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9698
9699
/** Opcode 0xde !11/2.
 * FICOM m16int: compares ST(0) with a 16-bit integer memory operand,
 * updating only FSW (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9731
9732
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9764
9765
/** Opcode 0xde !11/4.
 * FISUB m16int: subtracts a 16-bit integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9772
9773
/** Opcode 0xde !11/5.
 * FISUBR m16int: reversed subtract with a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9780
9781
/** Opcode 0xde !11/6.
 * FIDIV m16int: divides ST(0) by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9788
9789
/** Opcode 0xde !11/7.
 * FIDIVR m16int: reversed divide with a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9796
9797
9798/**
9799 * @opcode 0xde
9800 */
9801FNIEMOP_DEF(iemOp_EscF6)
9802{
9803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9804 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9805 if (IEM_IS_MODRM_REG_MODE(bRm))
9806 {
9807 switch (IEM_GET_MODRM_REG_8(bRm))
9808 {
9809 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9810 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9811 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9812 case 3: if (bRm == 0xd9)
9813 return FNIEMOP_CALL(iemOp_fcompp);
9814 return IEMOP_RAISE_INVALID_OPCODE();
9815 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9816 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9817 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9818 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9819 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9820 }
9821 }
9822 else
9823 {
9824 switch (IEM_GET_MODRM_REG_8(bRm))
9825 {
9826 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9827 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9828 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9829 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9830 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9831 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9832 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9833 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9835 }
9836 }
9837}
9838
9839
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE ST(i) followed by
 * FINCSTP (frees the register, then increments TOP). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9860
9861
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: stores the FPU status word in AX.  No-wait form, hence no
 * IEM_MC_MAYBE_RAISE_FPU_XCPT() check. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9877
9878
/** Opcode 0xdf 11/5.
 * FUCOMIP ST(0),ST(i): unordered compare into EFLAGS, then pop.
 * NOTE(review): this passes the same worker as FCOMIP below
 * (iemAImpl_fcomi_r80_by_r80); the FUCOMIP-specific difference (no #IA for
 * QNaN operands) does not appear to be modelled here — confirm this is the
 * intended approximation. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9885
9886
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare into EFLAGS, then pop.  Deferred to
 * iemCImpl_fcomi_fucomi with fPop=true. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9893
9894
/** Opcode 0xdf !11/0.
 * FILD m16int: converts a 16-bit signed integer memory operand to 80-bit FP
 * and pushes it onto the stack (ST(7) must be empty). */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9925
9926
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): stores ST(0) as a 16-bit integer with truncation
 * rounding, then pops.  On masked invalid-op with an empty ST(0) the
 * 16-bit integer indefinite value is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9960
9961
/** Opcode 0xdf !11/2.
 * FIST m16int: stores ST(0) as a 16-bit integer using the current FCW
 * rounding mode, no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9995
9996
/** Opcode 0xdf !11/3.
 * FISTP m16int: like FIST m16int but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10030
10031
/** Opcode 0xdf !11/4.
 *
 * FBLD m80bcd: load an 80-bit packed BCD value from memory, convert it
 * to an 80-bit real and push it onto the FPU stack.  Pushing requires
 * ST(7) to be empty; otherwise the push-overflow path is taken.
 */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the BCD operand before the push so memory faults precede
       FPU state changes. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* ST(7) occupied: pushing would overflow the register stack. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10062
10063
/** Opcode 0xdf !11/5.
 *
 * FILD m64i: load a signed 64-bit integer from memory, convert it to
 * an 80-bit real and push it onto the FPU stack.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the operand before modifying FPU state. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* ST(7) occupied: pushing would overflow the register stack. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10094
10095
/** Opcode 0xdf !11/6.
 *
 * FBSTP m80bcd: store ST(0) to memory as an 80-bit packed BCD value and
 * pop the FPU stack.  On stack underflow with FCW.IM masked, the BCD
 * indefinite value is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination; mapped via the _EX variant so the alignment
       mask (7) can be given explicitly. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Normal path: convert to BCD, store, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store BCD indefinite if IM masked, then pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10129
10130
/** Opcode 0xdf !11/7.
 *
 * FISTP m64i: store ST(0) to memory as a signed 64-bit integer and pop
 * the FPU stack.  On stack underflow with FCW.IM masked, the 64-bit
 * integer indefinite value (INT64_MIN) is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Normal path: convert, store, update FSW and pop ST(0). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store integer indefinite if IM masked, then pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10164
10165
/**
 * @opcode 0xdf
 *
 * FPU escape byte 0xdf: dispatches on the ModR/M byte.  Register forms
 * (mod == 3) select mostly reserved/compatibility encodings; memory
 * forms select the 16/64-bit integer and packed-BCD load/store group.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only DF E0 encodes FNSTSW AX; other /4 register forms are invalid. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10205
10206
/**
 * @opcode 0xe0
 *
 * LOOPNE Jb: decrement CX/ECX/RCX (selected by the effective address
 * size) and take the short relative branch when the counter is non-zero
 * AND ZF is clear; otherwise fall through to the next instruction.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10255
10256
/**
 * @opcode 0xe1
 *
 * LOOPE Jb: decrement CX/ECX/RCX (selected by the effective address
 * size) and take the short relative branch when the counter is non-zero
 * AND ZF is set; otherwise fall through to the next instruction.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10305
10306
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement CX/ECX/RCX (selected by the effective address
 * size) and take the short relative branch while the counter is
 * non-zero.  Contains a logging-only shortcut for the 'loop $-2'
 * busy-wait idiom.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override. How can that be restarted? See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Detect 'loop $-2' (branch target == start of this instruction) and,
       with verbose logging only, zero the counter to skip the spin. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Normal path: the address-size prefix selects the counter register. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10393
10394
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short relative branch when
 * CX/ECX/RCX (selected by the effective address size) is zero.
 * Note the counter is only tested, never modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Inverted test: non-zero falls through, zero branches. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10440
10441
/** Opcode 0xe4.
 *
 * IN AL,Ib: read one byte from the immediate-addressed I/O port into AL.
 * Deferred to the iemCImpl_in C implementation (1 = access size in bytes).
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
10450
10451
/** Opcode 0xe5.
 *
 * IN eAX,Ib: read a word or dword (per effective operand size) from the
 * immediate-addressed I/O port into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10460
10461
/** Opcode 0xe6.
 *
 * OUT Ib,AL: write AL to the immediate-addressed I/O port.
 * Deferred to the iemCImpl_out C implementation (1 = access size in bytes).
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
10470
10471
/** Opcode 0xe7.
 *
 * OUT Ib,eAX: write AX/EAX (per effective operand size) to the
 * immediate-addressed I/O port.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10480
10481
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  The immediate width follows the
 * effective operand size; in 64-bit mode the 32-bit displacement is
 * sign-extended to 64 bits.  Deferred to per-size C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10512
10513
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  16-bit operand size uses a 16-bit
 * displacement; 32-bit and 64-bit share the 32-bit displacement path
 * (sign-extended by the jump helper in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10545
10546
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump with an inline selector:offset pointer.
 * Invalid in 64-bit mode.  The offset is 16 or 32 bits depending on
 * the effective operand size; decoding is done here and the actual
 * transfer is deferred to iemCImpl_FarJmp.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10565
10566
10567/**
10568 * @opcode 0xeb
10569 */
10570FNIEMOP_DEF(iemOp_jmp_Jb)
10571{
10572 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10573 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10575 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
10576
10577 IEM_MC_BEGIN(0, 0);
10578 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10579 IEM_MC_END();
10580}
10581
10582
/** Opcode 0xec.
 *
 * IN AL,DX: read one byte from the I/O port addressed by DX into AL.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10590
10591
/** Opcode 0xed.
 *
 * IN eAX,DX: read a word or dword (per effective operand size) from the
 * I/O port addressed by DX into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10599
10600
/** Opcode 0xee.
 *
 * OUT DX,AL: write AL to the I/O port addressed by DX.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10608
10609
/** Opcode 0xef.
 *
 * OUT DX,eAX: write AX/EAX (per effective operand size) to the I/O port
 * addressed by DX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10617
10618
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix (unless lock handling is disabled via
 * fDisregardLock) and recursively decodes the following opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!pVCpu->iem.s.fDisregardLock)
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Continue with the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10631
10632
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB via the common software-interrupt C
 * implementation.  Requires a 386 or later.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo. Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
10646
10647
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix (clearing any earlier REPZ),
 * selects prefix-table index 3 for two-byte opcode dispatch, and
 * recursively decodes the following opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10665
10666
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records the prefix (clearing any earlier REPNZ),
 * selects prefix-table index 2 for two-byte opcode dispatch, and
 * recursively decodes the following opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10684
10685
/**
 * @opcode 0xf4
 *
 * HLT: halt the processor; deferred to the iemCImpl_hlt C
 * implementation (which handles privilege checks etc.).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10695
10696
/**
 * @opcode 0xf5
 *
 * CMC: complement the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10709
10710
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands go straight through the normal worker; memory
 * operands are mapped read-write and dispatched to the normal or locked
 * worker depending on the LOCK prefix.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation (supplies the normal and
 *              locked 8-bit workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10753
10754
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are handled by the shared iemOpCommonUnaryGReg
 * worker; memory operands are mapped read-write here, one case per
 * effective operand size, with the LOCK prefix selecting the atomic
 * worker variant.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation (supplies the normal and
 *              locked 16/32/64-bit workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10833
10834
/** Opcode 0xf6 /0.
 *
 * TEST Eb,Ib: AND the byte operand with an immediate, setting EFLAGS
 * only — the result is discarded, so memory is mapped read-only and
 * nothing is written back.  AF is left undefined per the architecture.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The 1-byte immediate follows the ModR/M bytes (cbImm = 1). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: TEST never writes its operand back. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10880
10881
/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv: AND the word/dword/qword operand with an immediate,
 * setting EFLAGS only — the result is discarded, so memory is mapped
 * read-only and nothing is written back.  In 64-bit mode the 32-bit
 * immediate is sign-extended.  AF is left undefined per the architecture.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                /* 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 2: a 2-byte immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Read-only mapping: TEST never writes its operand back. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: a 4-byte immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: a 4-byte immediate (sign-extended) follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11021
11022
/**
 * Common worker for opcode 0xf6 /4, /5, /6 and /7 (mul, imul, div, idiv Eb).
 *
 * All four byte-sized variants implicitly use AX as accumulator/result and
 * signal a divide error (\#DE) when the assembly helper reports failure via
 * a non-zero return code.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The byte-sized assembly implementation to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero helper status means the operation faulted (divide error). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero helper status means the operation faulted (divide error). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
11073
11074
11075/** Opcode 0xf7 /4, /5, /6 and /7. */
11076FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11077{
11078 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11079
11080 if (IEM_IS_MODRM_REG_MODE(bRm))
11081 {
11082 /* register access */
11083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11084 switch (pVCpu->iem.s.enmEffOpSize)
11085 {
11086 case IEMMODE_16BIT:
11087 {
11088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11089 IEM_MC_BEGIN(4, 1);
11090 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11091 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11092 IEM_MC_ARG(uint16_t, u16Value, 2);
11093 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11094 IEM_MC_LOCAL(int32_t, rc);
11095
11096 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11097 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11098 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11099 IEM_MC_REF_EFLAGS(pEFlags);
11100 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11101 IEM_MC_IF_LOCAL_IS_Z(rc) {
11102 IEM_MC_ADVANCE_RIP_AND_FINISH();
11103 } IEM_MC_ELSE() {
11104 IEM_MC_RAISE_DIVIDE_ERROR();
11105 } IEM_MC_ENDIF();
11106
11107 IEM_MC_END();
11108 break;
11109 }
11110
11111 case IEMMODE_32BIT:
11112 {
11113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11114 IEM_MC_BEGIN(4, 1);
11115 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11116 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11117 IEM_MC_ARG(uint32_t, u32Value, 2);
11118 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11119 IEM_MC_LOCAL(int32_t, rc);
11120
11121 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11122 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11123 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11124 IEM_MC_REF_EFLAGS(pEFlags);
11125 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11126 IEM_MC_IF_LOCAL_IS_Z(rc) {
11127 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11128 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11129 IEM_MC_ADVANCE_RIP_AND_FINISH();
11130 } IEM_MC_ELSE() {
11131 IEM_MC_RAISE_DIVIDE_ERROR();
11132 } IEM_MC_ENDIF();
11133
11134 IEM_MC_END();
11135 break;
11136 }
11137
11138 case IEMMODE_64BIT:
11139 {
11140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11141 IEM_MC_BEGIN(4, 1);
11142 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11143 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11144 IEM_MC_ARG(uint64_t, u64Value, 2);
11145 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11146 IEM_MC_LOCAL(int32_t, rc);
11147
11148 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11149 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11150 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11151 IEM_MC_REF_EFLAGS(pEFlags);
11152 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11153 IEM_MC_IF_LOCAL_IS_Z(rc) {
11154 IEM_MC_ADVANCE_RIP_AND_FINISH();
11155 } IEM_MC_ELSE() {
11156 IEM_MC_RAISE_DIVIDE_ERROR();
11157 } IEM_MC_ENDIF();
11158
11159 IEM_MC_END();
11160 break;
11161 }
11162
11163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11164 }
11165 }
11166 else
11167 {
11168 /* memory access. */
11169 switch (pVCpu->iem.s.enmEffOpSize)
11170 {
11171 case IEMMODE_16BIT:
11172 {
11173 IEM_MC_BEGIN(4, 2);
11174 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11175 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11176 IEM_MC_ARG(uint16_t, u16Value, 2);
11177 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11179 IEM_MC_LOCAL(int32_t, rc);
11180
11181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11183 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11184 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11185 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11186 IEM_MC_REF_EFLAGS(pEFlags);
11187 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11188 IEM_MC_IF_LOCAL_IS_Z(rc) {
11189 IEM_MC_ADVANCE_RIP_AND_FINISH();
11190 } IEM_MC_ELSE() {
11191 IEM_MC_RAISE_DIVIDE_ERROR();
11192 } IEM_MC_ENDIF();
11193
11194 IEM_MC_END();
11195 break;
11196 }
11197
11198 case IEMMODE_32BIT:
11199 {
11200 IEM_MC_BEGIN(4, 2);
11201 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11202 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11203 IEM_MC_ARG(uint32_t, u32Value, 2);
11204 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11206 IEM_MC_LOCAL(int32_t, rc);
11207
11208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11210 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11211 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11212 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11213 IEM_MC_REF_EFLAGS(pEFlags);
11214 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11215 IEM_MC_IF_LOCAL_IS_Z(rc) {
11216 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11217 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11218 IEM_MC_ADVANCE_RIP_AND_FINISH();
11219 } IEM_MC_ELSE() {
11220 IEM_MC_RAISE_DIVIDE_ERROR();
11221 } IEM_MC_ENDIF();
11222
11223 IEM_MC_END();
11224 break;
11225 }
11226
11227 case IEMMODE_64BIT:
11228 {
11229 IEM_MC_BEGIN(4, 2);
11230 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11231 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11232 IEM_MC_ARG(uint64_t, u64Value, 2);
11233 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11235 IEM_MC_LOCAL(int32_t, rc);
11236
11237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11239 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11240 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11241 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11242 IEM_MC_REF_EFLAGS(pEFlags);
11243 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11244 IEM_MC_IF_LOCAL_IS_Z(rc) {
11245 IEM_MC_ADVANCE_RIP_AND_FINISH();
11246 } IEM_MC_ELSE() {
11247 IEM_MC_RAISE_DIVIDE_ERROR();
11248 } IEM_MC_ENDIF();
11249
11250 IEM_MC_END();
11251 break;
11252 }
11253
11254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11255 }
11256 }
11257}
11258
11259/**
11260 * @opcode 0xf6
11261 */
11262FNIEMOP_DEF(iemOp_Grp3_Eb)
11263{
11264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11265 switch (IEM_GET_MODRM_REG_8(bRm))
11266 {
11267 case 0:
11268 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11269 case 1:
11270/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11271 return IEMOP_RAISE_INVALID_OPCODE();
11272 case 2:
11273 IEMOP_MNEMONIC(not_Eb, "not Eb");
11274 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11275 case 3:
11276 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11277 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11278 case 4:
11279 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11281 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
11282 case 5:
11283 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11284 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11285 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
11286 case 6:
11287 IEMOP_MNEMONIC(div_Eb, "div Eb");
11288 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11289 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
11290 case 7:
11291 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11292 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11293 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
11294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11295 }
11296}
11297
11298
11299/**
11300 * @opcode 0xf7
11301 */
11302FNIEMOP_DEF(iemOp_Grp3_Ev)
11303{
11304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11305 switch (IEM_GET_MODRM_REG_8(bRm))
11306 {
11307 case 0:
11308 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11309 case 1:
11310/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11311 return IEMOP_RAISE_INVALID_OPCODE();
11312 case 2:
11313 IEMOP_MNEMONIC(not_Ev, "not Ev");
11314 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11315 case 3:
11316 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11317 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11318 case 4:
11319 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11320 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11321 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
11322 case 5:
11323 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11324 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11325 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
11326 case 6:
11327 IEMOP_MNEMONIC(div_Ev, "div Ev");
11328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11329 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
11330 case 7:
11331 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11332 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11333 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
11334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11335 }
11336}
11337
11338
11339/**
11340 * @opcode 0xf8
11341 */
11342FNIEMOP_DEF(iemOp_clc)
11343{
11344 IEMOP_MNEMONIC(clc, "clc");
11345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11346 IEM_MC_BEGIN(0, 0);
11347 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11348 IEM_MC_ADVANCE_RIP_AND_FINISH();
11349 IEM_MC_END();
11350}
11351
11352
11353/**
11354 * @opcode 0xf9
11355 */
11356FNIEMOP_DEF(iemOp_stc)
11357{
11358 IEMOP_MNEMONIC(stc, "stc");
11359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11360 IEM_MC_BEGIN(0, 0);
11361 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11362 IEM_MC_ADVANCE_RIP_AND_FINISH();
11363 IEM_MC_END();
11364}
11365
11366
11367/**
11368 * @opcode 0xfa
11369 */
11370FNIEMOP_DEF(iemOp_cli)
11371{
11372 IEMOP_MNEMONIC(cli, "cli");
11373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11374 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11375}
11376
11377
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Privilege/VME checking and the interrupt
 * shadow handling are non-trivial, so the work is deferred to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11384
11385
11386/**
11387 * @opcode 0xfc
11388 */
11389FNIEMOP_DEF(iemOp_cld)
11390{
11391 IEMOP_MNEMONIC(cld, "cld");
11392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11393 IEM_MC_BEGIN(0, 0);
11394 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11395 IEM_MC_ADVANCE_RIP_AND_FINISH();
11396 IEM_MC_END();
11397}
11398
11399
11400/**
11401 * @opcode 0xfd
11402 */
11403FNIEMOP_DEF(iemOp_std)
11404{
11405 IEMOP_MNEMONIC(std, "std");
11406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11407 IEM_MC_BEGIN(0, 0);
11408 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11409 IEM_MC_ADVANCE_RIP_AND_FINISH();
11410 IEM_MC_END();
11411}
11412
11413
11414/**
11415 * @opcode 0xfe
11416 */
11417FNIEMOP_DEF(iemOp_Grp4)
11418{
11419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11420 switch (IEM_GET_MODRM_REG_8(bRm))
11421 {
11422 case 0:
11423 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11424 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11425 case 1:
11426 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11427 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11428 default:
11429 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11430 return IEMOP_RAISE_INVALID_OPCODE();
11431 }
11432}
11433
11434
11435/**
11436 * Opcode 0xff /2.
11437 * @param bRm The RM byte.
11438 */
11439FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11440{
11441 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11442 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11443
11444 if (IEM_IS_MODRM_REG_MODE(bRm))
11445 {
11446 /* The new RIP is taken from a register. */
11447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11448 switch (pVCpu->iem.s.enmEffOpSize)
11449 {
11450 case IEMMODE_16BIT:
11451 IEM_MC_BEGIN(1, 0);
11452 IEM_MC_ARG(uint16_t, u16Target, 0);
11453 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11454 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11455 IEM_MC_END();
11456 break;
11457
11458 case IEMMODE_32BIT:
11459 IEM_MC_BEGIN(1, 0);
11460 IEM_MC_ARG(uint32_t, u32Target, 0);
11461 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11462 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11463 IEM_MC_END();
11464 break;
11465
11466 case IEMMODE_64BIT:
11467 IEM_MC_BEGIN(1, 0);
11468 IEM_MC_ARG(uint64_t, u64Target, 0);
11469 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11470 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11471 IEM_MC_END();
11472 break;
11473
11474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11475 }
11476 }
11477 else
11478 {
11479 /* The new RIP is taken from a register. */
11480 switch (pVCpu->iem.s.enmEffOpSize)
11481 {
11482 case IEMMODE_16BIT:
11483 IEM_MC_BEGIN(1, 1);
11484 IEM_MC_ARG(uint16_t, u16Target, 0);
11485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11488 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11489 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11490 IEM_MC_END();
11491 break;
11492
11493 case IEMMODE_32BIT:
11494 IEM_MC_BEGIN(1, 1);
11495 IEM_MC_ARG(uint32_t, u32Target, 0);
11496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11499 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11500 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11501 IEM_MC_END();
11502 break;
11503
11504 case IEMMODE_64BIT:
11505 IEM_MC_BEGIN(1, 1);
11506 IEM_MC_ARG(uint64_t, u64Target, 0);
11507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11510 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11511 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11512 IEM_MC_END();
11513 break;
11514
11515 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11516 }
11517 }
11518}
11519
/** C implementation callback for far branches (callf/jmpf): takes the
 *  selector, the segment offset and the effective operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

/**
 * Common worker for opcode 0xff /3 (callf Ep) and /5 (jmpf Ep).
 *
 * Loads a far pointer (offset followed by a 16-bit selector) from memory and
 * hands it to @a pfnCImpl.  A register operand is invalid and raises \#UD.
 *
 * @param   bRm         The ModR/M byte.
 * @param   pfnCImpl    The far-branch C implementation (callf or jmpf).
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */
    /** @todo what does VIA do? */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu))
    { /* likely */ }
    else
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT;

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* The offset comes first, the selector follows it in memory. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* Only Intel honours REX.W here, see the adjustment above. */
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu));
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11586
11587
11588/**
11589 * Opcode 0xff /3.
11590 * @param bRm The RM byte.
11591 */
11592FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11593{
11594 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11595 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11596}
11597
11598
11599/**
11600 * Opcode 0xff /4.
11601 * @param bRm The RM byte.
11602 */
11603FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11604{
11605 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11607
11608 if (IEM_IS_MODRM_REG_MODE(bRm))
11609 {
11610 /* The new RIP is taken from a register. */
11611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11612 switch (pVCpu->iem.s.enmEffOpSize)
11613 {
11614 case IEMMODE_16BIT:
11615 IEM_MC_BEGIN(0, 1);
11616 IEM_MC_LOCAL(uint16_t, u16Target);
11617 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11618 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
11619 IEM_MC_END();
11620 break;
11621
11622 case IEMMODE_32BIT:
11623 IEM_MC_BEGIN(0, 1);
11624 IEM_MC_LOCAL(uint32_t, u32Target);
11625 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11626 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
11627 IEM_MC_END();
11628 break;
11629
11630 case IEMMODE_64BIT:
11631 IEM_MC_BEGIN(0, 1);
11632 IEM_MC_LOCAL(uint64_t, u64Target);
11633 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11634 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
11635 IEM_MC_END();
11636 break;
11637
11638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11639 }
11640 }
11641 else
11642 {
11643 /* The new RIP is taken from a memory location. */
11644 switch (pVCpu->iem.s.enmEffOpSize)
11645 {
11646 case IEMMODE_16BIT:
11647 IEM_MC_BEGIN(0, 2);
11648 IEM_MC_LOCAL(uint16_t, u16Target);
11649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11652 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11653 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
11654 IEM_MC_END();
11655 break;
11656
11657 case IEMMODE_32BIT:
11658 IEM_MC_BEGIN(0, 2);
11659 IEM_MC_LOCAL(uint32_t, u32Target);
11660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11663 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11664 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
11665 IEM_MC_END();
11666 break;
11667
11668 case IEMMODE_64BIT:
11669 IEM_MC_BEGIN(0, 2);
11670 IEM_MC_LOCAL(uint64_t, u64Target);
11671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11674 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11675 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
11676 IEM_MC_END();
11677 break;
11678
11679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11680 }
11681 }
11682}
11683
11684
11685/**
11686 * Opcode 0xff /5.
11687 * @param bRm The RM byte.
11688 */
11689FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11690{
11691 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11692 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11693}
11694
11695
11696/**
11697 * Opcode 0xff /6.
11698 * @param bRm The RM byte.
11699 */
11700FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11701{
11702 IEMOP_MNEMONIC(push_Ev, "push Ev");
11703
11704 /* Registers are handled by a common worker. */
11705 if (IEM_IS_MODRM_REG_MODE(bRm))
11706 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
11707
11708 /* Memory we do here. */
11709 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11710 switch (pVCpu->iem.s.enmEffOpSize)
11711 {
11712 case IEMMODE_16BIT:
11713 IEM_MC_BEGIN(0, 2);
11714 IEM_MC_LOCAL(uint16_t, u16Src);
11715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11718 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11719 IEM_MC_PUSH_U16(u16Src);
11720 IEM_MC_ADVANCE_RIP_AND_FINISH();
11721 IEM_MC_END();
11722 break;
11723
11724 case IEMMODE_32BIT:
11725 IEM_MC_BEGIN(0, 2);
11726 IEM_MC_LOCAL(uint32_t, u32Src);
11727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11730 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11731 IEM_MC_PUSH_U32(u32Src);
11732 IEM_MC_ADVANCE_RIP_AND_FINISH();
11733 IEM_MC_END();
11734 break;
11735
11736 case IEMMODE_64BIT:
11737 IEM_MC_BEGIN(0, 2);
11738 IEM_MC_LOCAL(uint64_t, u64Src);
11739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11742 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11743 IEM_MC_PUSH_U64(u64Src);
11744 IEM_MC_ADVANCE_RIP_AND_FINISH();
11745 IEM_MC_END();
11746 break;
11747
11748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11749 }
11750}
11751
11752
11753/**
11754 * @opcode 0xff
11755 */
11756FNIEMOP_DEF(iemOp_Grp5)
11757{
11758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11759 switch (IEM_GET_MODRM_REG_8(bRm))
11760 {
11761 case 0:
11762 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11763 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11764 case 1:
11765 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11766 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11767 case 2:
11768 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11769 case 3:
11770 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11771 case 4:
11772 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11773 case 5:
11774 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11775 case 6:
11776 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11777 case 7:
11778 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11779 return IEMOP_RAISE_INVALID_OPCODE();
11780 }
11781 AssertFailedReturn(VERR_IEM_IPE_3);
11782}
11783
11784
11785
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte.
 *
 * Prefix bytes, two-byte escapes (0x0f) and opcode groups (0x80-0x83, 0xc0,
 * 0xc1, 0xd0-0xd3, 0xf6, 0xf7, 0xfe, 0xff, ...) forward to the appropriate
 * sub-decoders.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11853
11854
11855/** @} */
11856
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette