VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66550

最後變更 在這個檔案從66550是 66479,由 vboxsync 提交於 8 年 前

IEM: Stubbed VEX opcode map 2 and 3.

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Author Date Id Revision
檔案大小: 392.2 KB
 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66479 2017-04-07 15:55:21Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_size
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 (MR form): decode + dispatch to the common byte-sized binary-operator helper. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64 (MR form); LOCK prefix allowed on memory destination. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 (RM form: register destination). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64 (RM form). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8 (fixed-register form, no ModR/M). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, Iz (immediate sized by the effective operand size). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
143
144
/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES; invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES; invalid in 64-bit mode. Defers to the C implementation with the effective operand size. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
168
169
/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8 (MR form). AF is architecturally undefined after OR, so tell the verifier. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
187
188
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64 (MR form); AF undefined after OR.
       Note: the spec comment above previously opened with a plain C comment,
       hiding the opcode 0x09 tags from the doxygen/test-generator parser. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
208
209
/**
 * @opcode 0x0a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 (RM form); AF undefined after OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64 (RM form); AF undefined after OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8; AF undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, Iz; AF undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
278
279
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS; invalid in 64-bit mode. Flagged potentially dangerous for the disassembler. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 (MR form): add with carry-in (see @opfltest cf). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 (MR form). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 (RM form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 (RM form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, Iz. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
424
425
/**
 * @opcode 0x16
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS; invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
435
436
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
/* NOTE(review): the previous tags here (@opgroup og_gen_arith_bin, @opfltest cf,
   @opflmodify cf,pf,af,zf,sf,of) looked copy/pasted from an arithmetic opcode;
   POP SS is a stack/segment-register op and does not touch arithmetic flags. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS; invalid in 64-bit mode; inhibits interrupts for one instruction (DISOPTYPE_INHIBIT_IRQS). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
450
451
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 (MR form): subtract with borrow-in (see @opfltest cf). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64 (MR form). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 (RM form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 (RM form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, Iz. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
528
529
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS; invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS; invalid in 64-bit mode. Defers to the C implementation with the effective operand size. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
553
554
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 (MR form); AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 (MR form); AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 (RM form); AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 (RM form); AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, Iz; AF undefined after AND. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
643
644
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then fetch and dispatch the real opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
663
664
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA; invalid in 64-bit mode; OF undefined. Defers to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
679
680
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 (MR form). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 (MR form). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 (RM form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 (RM form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, Iz. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
751
752
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then fetch and dispatch the real opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
771
772
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS; invalid in 64-bit mode; OF undefined. Defers to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
787
788
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 (MR form); AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 (MR form); AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 (RM form); AF undefined after XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 (RM form); AF undefined after XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
847
848
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8; AF undefined after XOR. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then fetch and dispatch the real opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
897
898
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA; invalid in 64-bit mode. Intel and AMD differ in the undefined flag results,
       hence the vendor-split @optest lines above. Defers to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
946
947
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 (old-style mnemonic macro; not yet converted to the new @opcode spec format). */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, Iz. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1006
1007
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then fetch and dispatch the real opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1020
1021
1022/**
1023 * @opcode 0x3f
1024 * @opfltest af,cf
1025 * @opflmodify cf,pf,af,zf,sf,of
1026 * @opflundef pf,zf,sf,of
1027 * @opgroup og_gen_arith_dec
1028 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1029 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1030 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1031 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1032 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1033 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1034 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1035 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1036 * @optest8 amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1037 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1038 * @optest10 amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1039 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1040 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1041 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1042 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1043 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1045 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1046 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1047 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1048 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1049 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1050 * @optest22 amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1051 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1052 * @optest24 amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1053 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest26 amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest28 amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1058 * @optest30 amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1059 * @optest31 intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1060 * @optest32 amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1061 * @optest33 intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1062 * @optest34 amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1063 */
1064FNIEMOP_DEF(iemOp_aas)
1065{
1066 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1067 IEMOP_HLP_NO_64BIT();
1068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1069 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1070
1071 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1072}
1073
1074
1075/**
1076 * Common 'inc/dec/not/neg register' helper.
1077 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    /* Applies the unary worker (inc/dec/not/neg) in pImpl to general register
       iReg at the current effective operand size, passing EFLAGS by reference. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit GPR write clears the upper half. */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS; /* not reached; keeps compilers happy about the enum switch */
}
1119
1120
1121/**
1122 * @opcode 0x40
1123 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Prefix consumed: decode the opcode byte that follows it. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1141
1142
1143/**
1144 * @opcode 0x41
1145 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B extends ModRM.rm / opcode reg by bit 3. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1164
1165
1166/**
1167 * @opcode 0x42
1168 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register by bit 3. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1187
1188
1189
1190/**
1191 * @opcode 0x43
1192 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;     /* REX.B: extend rm/base. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: extend SIB index. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1212
1213
1214/**
1215 * @opcode 0x44
1216 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R extends ModRM.reg by bit 3. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1235
1236
1237/**
1238 * @opcode 0x45
1239 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
        pVCpu->iem.s.uRexB = 1 << 3;   /* REX.B */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1259
1260
1261/**
1262 * @opcode 0x46
1263 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;   /* REX.R */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1283
1284
1285/**
1286 * @opcode 0x47
1287 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;   /* REX.R */
        pVCpu->iem.s.uRexB = 1 << 3;     /* REX.B */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1308
1309
1310/**
1311 * @opcode 0x48
1312 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1331
1332
1333/**
1334 * @opcode 0x49
1335 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1355
1356
1357/**
1358 * @opcode 0x4a
1359 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1379
1380
1381/**
1382 * @opcode 0x4b
1383 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1404
1405
1406/**
1407 * @opcode 0x4c
1408 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1428
1429
1430/**
1431 * @opcode 0x4d
1432 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1453
1454
1455/**
1456 * @opcode 0x4e
1457 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1478
1479
1480/**
1481 * @opcode 0x4f
1482 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1504
1505
1506/**
1507 * Common 'push register' helper.
1508 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    /* Pushes general register iReg at the current effective operand size.
       In 64-bit mode PUSH defaults to a 64-bit operand (32-bit is not
       encodable; 0x66 selects 16-bit), and REX.B extends the register. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1551
1552
1553/**
1554 * @opcode 0x50
1555 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX); /* common push-register path */
}
1561
1562
1563/**
1564 * @opcode 0x51
1565 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX); /* common push-register path */
}
1571
1572
1573/**
1574 * @opcode 0x52
1575 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX); /* common push-register path */
}
1581
1582
1583/**
1584 * @opcode 0x53
1585 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX); /* common push-register path */
}
1591
1592
1593/**
1594 * @opcode 0x54
1595 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* The 8086/8088 pushes the value of SP *after* the decrement, hence
           the explicit SP-2 sequence here.  NOTE(review): this relies on the
           IEM_MC block returning out of the function (via IEM_MC_ADVANCE_RIP/
           IEM_MC_END) so the common path below isn't also taken — confirm. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1611
1612
1613/**
1614 * @opcode 0x55
1615 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP); /* common push-register path */
}
1621
1622
1623/**
1624 * @opcode 0x56
1625 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI); /* common push-register path */
}
1631
1632
1633/**
1634 * @opcode 0x57
1635 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI); /* common push-register path */
}
1641
1642
1643/**
1644 * Common 'pop register' helper.
1645 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    /* Pops into general register iReg at the current effective operand size.
       In 64-bit mode POP defaults to a 64-bit operand (0x66 selects 16-bit)
       and REX.B extends the register. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1689
1690
1691/**
1692 * @opcode 0x58
1693 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX); /* common pop-register path */
}
1699
1700
1701/**
1702 * @opcode 0x59
1703 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX); /* common pop-register path */
}
1709
1710
1711/**
1712 * @opcode 0x5a
1713 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX); /* common pop-register path */
}
1719
1720
1721/**
1722 * @opcode 0x5b
1723 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX); /* common pop-register path */
}
1729
1730
1731/**
1732 * @opcode 0x5c
1733 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is pop r12 and needs no special casing; the common
           path ORs in uRexB.  Otherwise apply the 64-bit default op size. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* POP SP is special: the popped value must overwrite SP, so pop into a
       local first and store it to the register afterwards. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1780
1781
1782/**
1783 * @opcode 0x5d
1784 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP); /* common pop-register path */
}
1790
1791
1792/**
1793 * @opcode 0x5e
1794 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI); /* common pop-register path */
}
1800
1801
1802/**
1803 * @opcode 0x5f
1804 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI); /* common pop-register path */
}
1810
1811
1812/**
1813 * @opcode 0x60
1814 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();  /* PUSHA was introduced with the 80186. */
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode (0x60 is reused there). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1825
1826
1827/**
1828 * @opcode 0x61
1829 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA outside 64-bit mode; in 64-bit mode it is the (unsupported)
       MVEX prefix. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1846
1847
1848/**
1849 * @opcode 0x62
1850 * @opmnemonic bound
1851 * @op1 Gv
1852 * @op2 Ma
1853 * @opmincpu 80186
1854 * @ophints harmless invalid_64
1855 * @optest op1=0 op2=0 ->
1856 * @optest op1=1 op2=0 -> value.xcpt=5
1857 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1858 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1859 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1860 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1861 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1862 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1863 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1864 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1865 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1866 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1867 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1868 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1869 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1870 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1871 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1872 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1873 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1874 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1875 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1876 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1877 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1878 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1879 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1880 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1881 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1882 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1883 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1884 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1885 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1886 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1887 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1888 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1889 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1890 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1891 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1892 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1893 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1894 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1895 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1896 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1897 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1898 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1899 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    IEMOP_MNEMONIC(evex, "evex");
    /* Consume the remaining two EVEX payload bytes, then give up: decoding of
       EVEX-encoded instructions is not implemented yet. */
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1987
1988
1989/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL Ew,Gw: adjust the RPL field of the destination selector using the
       source selector (worker: iemAImpl_arpl).  286+ and protected mode only. */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write mapping of the destination word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2038
2039
2040/**
2041 * @opcode 0x63
2042 *
2043 * @note This is a weird one. It works like a regular move instruction if
2044 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2045 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    /* MOVSXD Gv,Ev: sign-extend a 32-bit source into a 64-bit register. */
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2083
2084
2085/**
2086 * @opcode 0x64
2087 * @opmnemonic segfs
2088 * @opmincpu 80386
2089 * @opgroup og_prefixes
2090 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix (386+): record it and decode the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2102
2103
/**
 * @opcode      0x65
 * @opmnemonic  seggs
 * @opmincpu    80386
 * @opgroup     og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record the prefix, make GS the effective
       segment, then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2121
2122
/**
 * @opcode      0x66
 * @opmnemonic  opsize
 * @openc       prefix
 * @opmincpu    80386
 * @ophints     harmless
 * @opgroup     og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (66h): flag it, recalculate the effective
       operand size, then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present, i.e. it only claims the prefix index
       slot if no REPZ/REPNZ prefix has taken it already. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2147
2148
/**
 * @opcode      0x67
 * @opmnemonic  addrsize
 * @openc       prefix
 * @opmincpu    80386
 * @ophints     harmless
 * @opgroup     og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (67h): flag it and toggle the effective
       address mode (16<->32 in legacy modes, 64->32 in long mode), then
       fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2174
2175
/**
 * @opcode      0x68
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* PUSH Iz: push an immediate of the effective operand size.  In 64-bit
       mode the operand size defaults to 64 bits and the imm32 is
       sign-extended (see the S32_SX_U64 fetch below). */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2222
2223
/**
 * @opcode      0x69
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /* Three-operand signed multiply: Gv = Ev * Iz.  The product is computed
       into a local temporary and then written to the reg-field register.
       SF/ZF/AF/PF are left undefined by hardware, hence the verification
       mask below. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* EA first (2 = number of immediate bytes still to come), then the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* EA first (4 immediate bytes follow), then the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* EA first (4 immediate bytes follow), then the sign-extended imm32. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2385
2386
/**
 * @opcode      0x6a
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* PUSH Ib: push a sign-extended imm8.  i8Imm is int8_t, so the usual C
       conversions sign-extend it to the push width selected below. */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2415
2416
/**
 * @opcode      0x6b
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /* Three-operand signed multiply with a sign-extended imm8 source.
       Structure mirrors iemOp_imul_Gv_Ev_Iz; only the immediate decoding
       differs.  SF/ZF/AF/PF are undefined after IMUL. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8. */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* EA first (1 immediate byte follows), then the sign-extended imm8. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8. */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* EA first (1 immediate byte follows), then the sign-extended imm8. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8. */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* EA first (1 immediate byte follows), then the sign-extended imm8. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2572
2573
/**
 * @opcode      0x6c
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /* INS/INSB: byte input from port DX to ES:[e/rDI].  Both REPZ and REPNZ
       select the repeated C implementation; the work is deferred to C code
       keyed on the effective address mode.
       NOTE(review): the trailing 'false' argument presumably means "I/O
       permission not yet checked" -- confirm against the iemCImpl_ins_*
       implementations. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2604
2605
/**
 * @opcode      0x6d
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* INS/INSW/INSD: word/dword input from port DX to ES:[e/rDI], dispatched
       on effective operand and address size.  A 64-bit operand size is
       handled like 32-bit (case fall-through below).  REPZ/REPNZ select the
       repeated implementations.
       NOTE(review): the trailing 'false' argument presumably means "I/O
       permission not yet checked" -- confirm against iemCImpl_ins_*. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2668
2669
/**
 * @opcode      0x6e
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* OUTS/OUTSB: byte output from DS:[e/rSI] (segment overridable, hence
       iEffSeg is passed along) to port DX.  REPZ/REPNZ select the repeated
       implementations.
       NOTE(review): the trailing 'false' argument presumably means "I/O
       permission not yet checked" -- confirm against iemCImpl_outs_*. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2700
2701
/**
 * @opcode      0x6f
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* OUTS/OUTSW/OUTSD: word/dword output from DS:[e/rSI] (segment
       overridable) to port DX, dispatched on effective operand and address
       size.  64-bit operand size is handled like 32-bit (case fall-through).
       NOTE(review): the trailing 'false' argument presumably means "I/O
       permission not yet checked" -- confirm against iemCImpl_outs_*. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2764
2765
/**
 * @opcode      0x70
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8: short jump taken when EFLAGS.OF is set.  Operand size
       defaults to 64-bit in long mode (helper below). */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2785
2786
/**
 * @opcode      0x71
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8: short jump taken when EFLAGS.OF is clear (branch arms are
       the inverse of JO). */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2806
/**
 * @opcode      0x72
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: short jump taken when EFLAGS.CF is set. */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2826
2827
/**
 * @opcode      0x73
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: short jump taken when EFLAGS.CF is clear. */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2847
2848
/**
 * @opcode      0x74
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: short jump taken when EFLAGS.ZF is set. */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2868
2869
/**
 * @opcode      0x75
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: short jump taken when EFLAGS.ZF is clear. */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2889
2890
/**
 * @opcode      0x76
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: short jump taken when CF=1 or ZF=1. */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2910
2911
/**
 * @opcode      0x77
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8: short jump taken when CF=0 and ZF=0 (inverse arms of JBE). */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2931
2932
/**
 * @opcode      0x78
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: short jump taken when EFLAGS.SF is set. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2952
2953
/**
 * @opcode      0x79
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: short jump taken when EFLAGS.SF is clear. */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2973
2974
/**
 * @opcode      0x7a
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: short jump taken when EFLAGS.PF is set. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2994
2995
/**
 * @opcode      0x7b
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: short jump taken when EFLAGS.PF is clear. */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3015
3016
/**
 * @opcode      0x7c
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: short jump taken when SF != OF. */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3036
3037
/**
 * @opcode      0x7d
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8: short jump taken when SF == OF (inverse arms of JL). */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3057
3058
/**
 * @opcode      0x7e
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: short jump taken when ZF=1 or SF != OF. */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3078
3079
/**
 * @opcode      0x7f
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8: short jump taken when ZF=0 and SF == OF (inverse arms of JLE). */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3099
3100
/**
 * @opcode      0x80
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1 byte ops with imm8: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
       The actual operation is selected by the ModR/M reg field via the
       g_apIemImplGrp1 table; the switch below only sets the mnemonic. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib,  "or  Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP has no locked variant (pfnLockedU8 is NULL) and only reads
           the destination; everything else read-modify-writes it. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* EA first (1 immediate byte follows the ModR/M encoding), then the immediate. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK not allowed with CMP. */

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3172
3173
3174/**
3175 * @opcode 0x81
3176 */
3177FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3178{
3179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3180 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3181 {
3182 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3183 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3184 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3185 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3186 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3187 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3188 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3189 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3190 }
3191 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3192
3193 switch (pVCpu->iem.s.enmEffOpSize)
3194 {
3195 case IEMMODE_16BIT:
3196 {
3197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3198 {
3199 /* register target */
3200 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3202 IEM_MC_BEGIN(3, 0);
3203 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3204 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3205 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3206
3207 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3208 IEM_MC_REF_EFLAGS(pEFlags);
3209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3210
3211 IEM_MC_ADVANCE_RIP();
3212 IEM_MC_END();
3213 }
3214 else
3215 {
3216 /* memory target */
3217 uint32_t fAccess;
3218 if (pImpl->pfnLockedU16)
3219 fAccess = IEM_ACCESS_DATA_RW;
3220 else /* CMP, TEST */
3221 fAccess = IEM_ACCESS_DATA_R;
3222 IEM_MC_BEGIN(3, 2);
3223 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3224 IEM_MC_ARG(uint16_t, u16Src, 1);
3225 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3227
3228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3229 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3230 IEM_MC_ASSIGN(u16Src, u16Imm);
3231 if (pImpl->pfnLockedU16)
3232 IEMOP_HLP_DONE_DECODING();
3233 else
3234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3235 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3236 IEM_MC_FETCH_EFLAGS(EFlags);
3237 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3239 else
3240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3241
3242 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3243 IEM_MC_COMMIT_EFLAGS(EFlags);
3244 IEM_MC_ADVANCE_RIP();
3245 IEM_MC_END();
3246 }
3247 break;
3248 }
3249
3250 case IEMMODE_32BIT:
3251 {
3252 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3253 {
3254 /* register target */
3255 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3257 IEM_MC_BEGIN(3, 0);
3258 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3259 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3261
3262 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3263 IEM_MC_REF_EFLAGS(pEFlags);
3264 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3265 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3266
3267 IEM_MC_ADVANCE_RIP();
3268 IEM_MC_END();
3269 }
3270 else
3271 {
3272 /* memory target */
3273 uint32_t fAccess;
3274 if (pImpl->pfnLockedU32)
3275 fAccess = IEM_ACCESS_DATA_RW;
3276 else /* CMP, TEST */
3277 fAccess = IEM_ACCESS_DATA_R;
3278 IEM_MC_BEGIN(3, 2);
3279 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3280 IEM_MC_ARG(uint32_t, u32Src, 1);
3281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3283
3284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3285 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3286 IEM_MC_ASSIGN(u32Src, u32Imm);
3287 if (pImpl->pfnLockedU32)
3288 IEMOP_HLP_DONE_DECODING();
3289 else
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3292 IEM_MC_FETCH_EFLAGS(EFlags);
3293 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3295 else
3296 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3297
3298 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3299 IEM_MC_COMMIT_EFLAGS(EFlags);
3300 IEM_MC_ADVANCE_RIP();
3301 IEM_MC_END();
3302 }
3303 break;
3304 }
3305
3306 case IEMMODE_64BIT:
3307 {
3308 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3309 {
3310 /* register target */
3311 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3313 IEM_MC_BEGIN(3, 0);
3314 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3315 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3316 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3317
3318 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3319 IEM_MC_REF_EFLAGS(pEFlags);
3320 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3321
3322 IEM_MC_ADVANCE_RIP();
3323 IEM_MC_END();
3324 }
3325 else
3326 {
3327 /* memory target */
3328 uint32_t fAccess;
3329 if (pImpl->pfnLockedU64)
3330 fAccess = IEM_ACCESS_DATA_RW;
3331 else /* CMP */
3332 fAccess = IEM_ACCESS_DATA_R;
3333 IEM_MC_BEGIN(3, 2);
3334 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3335 IEM_MC_ARG(uint64_t, u64Src, 1);
3336 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3338
3339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3340 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3341 if (pImpl->pfnLockedU64)
3342 IEMOP_HLP_DONE_DECODING();
3343 else
3344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3345 IEM_MC_ASSIGN(u64Src, u64Imm);
3346 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3347 IEM_MC_FETCH_EFLAGS(EFlags);
3348 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3349 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3350 else
3351 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3352
3353 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3354 IEM_MC_COMMIT_EFLAGS(EFlags);
3355 IEM_MC_ADVANCE_RIP();
3356 IEM_MC_END();
3357 }
3358 break;
3359 }
3360 }
3361 return VINF_SUCCESS;
3362}
3363
3364
3365/**
3366 * @opcode 0x82
3367 * @opmnemonic grp1_82
3368 * @opgroup og_groups
3369 */
3370FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3371{
3372 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3373 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3374}
3375
3376
3377/**
3378 * @opcode 0x83
3379 */
3380FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3381{
3382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3383 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3384 {
3385 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3386 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3387 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3388 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3389 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3390 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3391 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3392 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3393 }
3394 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3395 to the 386 even if absent in the intel reference manuals and some
3396 3rd party opcode listings. */
3397 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3398
3399 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3400 {
3401 /*
3402 * Register target
3403 */
3404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3405 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3406 switch (pVCpu->iem.s.enmEffOpSize)
3407 {
3408 case IEMMODE_16BIT:
3409 {
3410 IEM_MC_BEGIN(3, 0);
3411 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3412 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3413 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3414
3415 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3416 IEM_MC_REF_EFLAGS(pEFlags);
3417 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3418
3419 IEM_MC_ADVANCE_RIP();
3420 IEM_MC_END();
3421 break;
3422 }
3423
3424 case IEMMODE_32BIT:
3425 {
3426 IEM_MC_BEGIN(3, 0);
3427 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3428 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3429 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3430
3431 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3432 IEM_MC_REF_EFLAGS(pEFlags);
3433 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3434 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3435
3436 IEM_MC_ADVANCE_RIP();
3437 IEM_MC_END();
3438 break;
3439 }
3440
3441 case IEMMODE_64BIT:
3442 {
3443 IEM_MC_BEGIN(3, 0);
3444 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3445 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3446 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3447
3448 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3449 IEM_MC_REF_EFLAGS(pEFlags);
3450 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3451
3452 IEM_MC_ADVANCE_RIP();
3453 IEM_MC_END();
3454 break;
3455 }
3456 }
3457 }
3458 else
3459 {
3460 /*
3461 * Memory target.
3462 */
3463 uint32_t fAccess;
3464 if (pImpl->pfnLockedU16)
3465 fAccess = IEM_ACCESS_DATA_RW;
3466 else /* CMP */
3467 fAccess = IEM_ACCESS_DATA_R;
3468
3469 switch (pVCpu->iem.s.enmEffOpSize)
3470 {
3471 case IEMMODE_16BIT:
3472 {
3473 IEM_MC_BEGIN(3, 2);
3474 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3475 IEM_MC_ARG(uint16_t, u16Src, 1);
3476 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3478
3479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3480 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3481 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3482 if (pImpl->pfnLockedU16)
3483 IEMOP_HLP_DONE_DECODING();
3484 else
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3486 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3487 IEM_MC_FETCH_EFLAGS(EFlags);
3488 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3489 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3490 else
3491 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3492
3493 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3494 IEM_MC_COMMIT_EFLAGS(EFlags);
3495 IEM_MC_ADVANCE_RIP();
3496 IEM_MC_END();
3497 break;
3498 }
3499
3500 case IEMMODE_32BIT:
3501 {
3502 IEM_MC_BEGIN(3, 2);
3503 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3504 IEM_MC_ARG(uint32_t, u32Src, 1);
3505 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3507
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3509 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3510 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3511 if (pImpl->pfnLockedU32)
3512 IEMOP_HLP_DONE_DECODING();
3513 else
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3516 IEM_MC_FETCH_EFLAGS(EFlags);
3517 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3518 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3519 else
3520 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3521
3522 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3523 IEM_MC_COMMIT_EFLAGS(EFlags);
3524 IEM_MC_ADVANCE_RIP();
3525 IEM_MC_END();
3526 break;
3527 }
3528
3529 case IEMMODE_64BIT:
3530 {
3531 IEM_MC_BEGIN(3, 2);
3532 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3533 IEM_MC_ARG(uint64_t, u64Src, 1);
3534 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3536
3537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3538 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3539 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3540 if (pImpl->pfnLockedU64)
3541 IEMOP_HLP_DONE_DECODING();
3542 else
3543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3544 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3545 IEM_MC_FETCH_EFLAGS(EFlags);
3546 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3547 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3548 else
3549 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3550
3551 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3552 IEM_MC_COMMIT_EFLAGS(EFlags);
3553 IEM_MC_ADVANCE_RIP();
3554 IEM_MC_END();
3555 break;
3556 }
3557 }
3558 }
3559 return VINF_SUCCESS;
3560}
3561
3562
3563/**
3564 * @opcode 0x84
3565 */
3566FNIEMOP_DEF(iemOp_test_Eb_Gb)
3567{
3568 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3569 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3570 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3571}
3572
3573
3574/**
3575 * @opcode 0x85
3576 */
3577FNIEMOP_DEF(iemOp_test_Ev_Gv)
3578{
3579 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3581 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3582}
3583
3584
3585/**
3586 * @opcode 0x86
3587 */
3588FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3589{
3590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3591 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3592
3593 /*
3594 * If rm is denoting a register, no more instruction bytes.
3595 */
3596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3597 {
3598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3599
3600 IEM_MC_BEGIN(0, 2);
3601 IEM_MC_LOCAL(uint8_t, uTmp1);
3602 IEM_MC_LOCAL(uint8_t, uTmp2);
3603
3604 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3605 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3606 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3607 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3608
3609 IEM_MC_ADVANCE_RIP();
3610 IEM_MC_END();
3611 }
3612 else
3613 {
3614 /*
3615 * We're accessing memory.
3616 */
3617/** @todo the register must be committed separately! */
3618 IEM_MC_BEGIN(2, 2);
3619 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3620 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3622
3623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3624 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3625 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3626 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
3627 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3628
3629 IEM_MC_ADVANCE_RIP();
3630 IEM_MC_END();
3631 }
3632 return VINF_SUCCESS;
3633}
3634
3635
3636/**
3637 * @opcode 0x87
3638 */
3639FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3640{
3641 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3643
3644 /*
3645 * If rm is denoting a register, no more instruction bytes.
3646 */
3647 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3648 {
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3650
3651 switch (pVCpu->iem.s.enmEffOpSize)
3652 {
3653 case IEMMODE_16BIT:
3654 IEM_MC_BEGIN(0, 2);
3655 IEM_MC_LOCAL(uint16_t, uTmp1);
3656 IEM_MC_LOCAL(uint16_t, uTmp2);
3657
3658 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3659 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3660 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3661 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3662
3663 IEM_MC_ADVANCE_RIP();
3664 IEM_MC_END();
3665 return VINF_SUCCESS;
3666
3667 case IEMMODE_32BIT:
3668 IEM_MC_BEGIN(0, 2);
3669 IEM_MC_LOCAL(uint32_t, uTmp1);
3670 IEM_MC_LOCAL(uint32_t, uTmp2);
3671
3672 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3673 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3674 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3675 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3676
3677 IEM_MC_ADVANCE_RIP();
3678 IEM_MC_END();
3679 return VINF_SUCCESS;
3680
3681 case IEMMODE_64BIT:
3682 IEM_MC_BEGIN(0, 2);
3683 IEM_MC_LOCAL(uint64_t, uTmp1);
3684 IEM_MC_LOCAL(uint64_t, uTmp2);
3685
3686 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3687 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3688 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3689 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3690
3691 IEM_MC_ADVANCE_RIP();
3692 IEM_MC_END();
3693 return VINF_SUCCESS;
3694
3695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3696 }
3697 }
3698 else
3699 {
3700 /*
3701 * We're accessing memory.
3702 */
3703 switch (pVCpu->iem.s.enmEffOpSize)
3704 {
3705/** @todo the register must be committed separately! */
3706 case IEMMODE_16BIT:
3707 IEM_MC_BEGIN(2, 2);
3708 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3709 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3711
3712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3713 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3714 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3715 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
3716 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3717
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 return VINF_SUCCESS;
3721
3722 case IEMMODE_32BIT:
3723 IEM_MC_BEGIN(2, 2);
3724 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3725 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3727
3728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3729 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3730 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3731 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
3732 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3733
3734 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3735 IEM_MC_ADVANCE_RIP();
3736 IEM_MC_END();
3737 return VINF_SUCCESS;
3738
3739 case IEMMODE_64BIT:
3740 IEM_MC_BEGIN(2, 2);
3741 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3742 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3744
3745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3746 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3747 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3748 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
3749 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3750
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 return VINF_SUCCESS;
3754
3755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3756 }
3757 }
3758}
3759
3760
3761/**
3762 * @opcode 0x88
3763 */
3764FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3765{
3766 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3767
3768 uint8_t bRm;
3769 IEM_OPCODE_GET_NEXT_U8(&bRm);
3770
3771 /*
3772 * If rm is denoting a register, no more instruction bytes.
3773 */
3774 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3775 {
3776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3777 IEM_MC_BEGIN(0, 1);
3778 IEM_MC_LOCAL(uint8_t, u8Value);
3779 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3780 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3781 IEM_MC_ADVANCE_RIP();
3782 IEM_MC_END();
3783 }
3784 else
3785 {
3786 /*
3787 * We're writing a register to memory.
3788 */
3789 IEM_MC_BEGIN(0, 2);
3790 IEM_MC_LOCAL(uint8_t, u8Value);
3791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3795 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3796 IEM_MC_ADVANCE_RIP();
3797 IEM_MC_END();
3798 }
3799 return VINF_SUCCESS;
3800
3801}
3802
3803
3804/**
3805 * @opcode 0x89
3806 */
3807FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3808{
3809 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3810
3811 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3812
3813 /*
3814 * If rm is denoting a register, no more instruction bytes.
3815 */
3816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3817 {
3818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819 switch (pVCpu->iem.s.enmEffOpSize)
3820 {
3821 case IEMMODE_16BIT:
3822 IEM_MC_BEGIN(0, 1);
3823 IEM_MC_LOCAL(uint16_t, u16Value);
3824 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3825 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3826 IEM_MC_ADVANCE_RIP();
3827 IEM_MC_END();
3828 break;
3829
3830 case IEMMODE_32BIT:
3831 IEM_MC_BEGIN(0, 1);
3832 IEM_MC_LOCAL(uint32_t, u32Value);
3833 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3834 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3835 IEM_MC_ADVANCE_RIP();
3836 IEM_MC_END();
3837 break;
3838
3839 case IEMMODE_64BIT:
3840 IEM_MC_BEGIN(0, 1);
3841 IEM_MC_LOCAL(uint64_t, u64Value);
3842 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3843 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3844 IEM_MC_ADVANCE_RIP();
3845 IEM_MC_END();
3846 break;
3847 }
3848 }
3849 else
3850 {
3851 /*
3852 * We're writing a register to memory.
3853 */
3854 switch (pVCpu->iem.s.enmEffOpSize)
3855 {
3856 case IEMMODE_16BIT:
3857 IEM_MC_BEGIN(0, 2);
3858 IEM_MC_LOCAL(uint16_t, u16Value);
3859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3863 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3864 IEM_MC_ADVANCE_RIP();
3865 IEM_MC_END();
3866 break;
3867
3868 case IEMMODE_32BIT:
3869 IEM_MC_BEGIN(0, 2);
3870 IEM_MC_LOCAL(uint32_t, u32Value);
3871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3874 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3875 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3876 IEM_MC_ADVANCE_RIP();
3877 IEM_MC_END();
3878 break;
3879
3880 case IEMMODE_64BIT:
3881 IEM_MC_BEGIN(0, 2);
3882 IEM_MC_LOCAL(uint64_t, u64Value);
3883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3887 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3888 IEM_MC_ADVANCE_RIP();
3889 IEM_MC_END();
3890 break;
3891 }
3892 }
3893 return VINF_SUCCESS;
3894}
3895
3896
3897/**
3898 * @opcode 0x8a
3899 */
3900FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3901{
3902 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3903
3904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3905
3906 /*
3907 * If rm is denoting a register, no more instruction bytes.
3908 */
3909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3910 {
3911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3912 IEM_MC_BEGIN(0, 1);
3913 IEM_MC_LOCAL(uint8_t, u8Value);
3914 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3915 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3916 IEM_MC_ADVANCE_RIP();
3917 IEM_MC_END();
3918 }
3919 else
3920 {
3921 /*
3922 * We're loading a register from memory.
3923 */
3924 IEM_MC_BEGIN(0, 2);
3925 IEM_MC_LOCAL(uint8_t, u8Value);
3926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3929 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3930 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3931 IEM_MC_ADVANCE_RIP();
3932 IEM_MC_END();
3933 }
3934 return VINF_SUCCESS;
3935}
3936
3937
3938/**
3939 * @opcode 0x8b
3940 */
3941FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3942{
3943 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3944
3945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3946
3947 /*
3948 * If rm is denoting a register, no more instruction bytes.
3949 */
3950 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3951 {
3952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3953 switch (pVCpu->iem.s.enmEffOpSize)
3954 {
3955 case IEMMODE_16BIT:
3956 IEM_MC_BEGIN(0, 1);
3957 IEM_MC_LOCAL(uint16_t, u16Value);
3958 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3959 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3960 IEM_MC_ADVANCE_RIP();
3961 IEM_MC_END();
3962 break;
3963
3964 case IEMMODE_32BIT:
3965 IEM_MC_BEGIN(0, 1);
3966 IEM_MC_LOCAL(uint32_t, u32Value);
3967 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3968 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3969 IEM_MC_ADVANCE_RIP();
3970 IEM_MC_END();
3971 break;
3972
3973 case IEMMODE_64BIT:
3974 IEM_MC_BEGIN(0, 1);
3975 IEM_MC_LOCAL(uint64_t, u64Value);
3976 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3977 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3978 IEM_MC_ADVANCE_RIP();
3979 IEM_MC_END();
3980 break;
3981 }
3982 }
3983 else
3984 {
3985 /*
3986 * We're loading a register from memory.
3987 */
3988 switch (pVCpu->iem.s.enmEffOpSize)
3989 {
3990 case IEMMODE_16BIT:
3991 IEM_MC_BEGIN(0, 2);
3992 IEM_MC_LOCAL(uint16_t, u16Value);
3993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3996 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3997 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3998 IEM_MC_ADVANCE_RIP();
3999 IEM_MC_END();
4000 break;
4001
4002 case IEMMODE_32BIT:
4003 IEM_MC_BEGIN(0, 2);
4004 IEM_MC_LOCAL(uint32_t, u32Value);
4005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4008 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4009 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
4010 IEM_MC_ADVANCE_RIP();
4011 IEM_MC_END();
4012 break;
4013
4014 case IEMMODE_64BIT:
4015 IEM_MC_BEGIN(0, 2);
4016 IEM_MC_LOCAL(uint64_t, u64Value);
4017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4020 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4021 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
4022 IEM_MC_ADVANCE_RIP();
4023 IEM_MC_END();
4024 break;
4025 }
4026 }
4027 return VINF_SUCCESS;
4028}
4029
4030
4031/**
4032 * opcode 0x63
4033 * @todo Table fixme
4034 */
4035FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4036{
4037 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4038 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4039 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4040 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4041 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4042}
4043
4044
4045/**
4046 * @opcode 0x8c
4047 */
4048FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4049{
4050 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4051
4052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053
4054 /*
4055 * Check that the destination register exists. The REX.R prefix is ignored.
4056 */
4057 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4058 if ( iSegReg > X86_SREG_GS)
4059 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4060
4061 /*
4062 * If rm is denoting a register, no more instruction bytes.
4063 * In that case, the operand size is respected and the upper bits are
4064 * cleared (starting with some pentium).
4065 */
4066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4067 {
4068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4069 switch (pVCpu->iem.s.enmEffOpSize)
4070 {
4071 case IEMMODE_16BIT:
4072 IEM_MC_BEGIN(0, 1);
4073 IEM_MC_LOCAL(uint16_t, u16Value);
4074 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4075 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
4076 IEM_MC_ADVANCE_RIP();
4077 IEM_MC_END();
4078 break;
4079
4080 case IEMMODE_32BIT:
4081 IEM_MC_BEGIN(0, 1);
4082 IEM_MC_LOCAL(uint32_t, u32Value);
4083 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4084 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
4085 IEM_MC_ADVANCE_RIP();
4086 IEM_MC_END();
4087 break;
4088
4089 case IEMMODE_64BIT:
4090 IEM_MC_BEGIN(0, 1);
4091 IEM_MC_LOCAL(uint64_t, u64Value);
4092 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4093 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
4094 IEM_MC_ADVANCE_RIP();
4095 IEM_MC_END();
4096 break;
4097 }
4098 }
4099 else
4100 {
4101 /*
4102 * We're saving the register to memory. The access is word sized
4103 * regardless of operand size prefixes.
4104 */
4105#if 0 /* not necessary */
4106 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4107#endif
4108 IEM_MC_BEGIN(0, 2);
4109 IEM_MC_LOCAL(uint16_t, u16Value);
4110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4113 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4114 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4115 IEM_MC_ADVANCE_RIP();
4116 IEM_MC_END();
4117 }
4118 return VINF_SUCCESS;
4119}
4120
4121
4122
4123
4124/**
4125 * @opcode 0x8d
4126 */
4127FNIEMOP_DEF(iemOp_lea_Gv_M)
4128{
4129 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4130 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4131 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4132 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4133
4134 switch (pVCpu->iem.s.enmEffOpSize)
4135 {
4136 case IEMMODE_16BIT:
4137 IEM_MC_BEGIN(0, 2);
4138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4139 IEM_MC_LOCAL(uint16_t, u16Cast);
4140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4142 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4143 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
4144 IEM_MC_ADVANCE_RIP();
4145 IEM_MC_END();
4146 return VINF_SUCCESS;
4147
4148 case IEMMODE_32BIT:
4149 IEM_MC_BEGIN(0, 2);
4150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4151 IEM_MC_LOCAL(uint32_t, u32Cast);
4152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4155 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
4156 IEM_MC_ADVANCE_RIP();
4157 IEM_MC_END();
4158 return VINF_SUCCESS;
4159
4160 case IEMMODE_64BIT:
4161 IEM_MC_BEGIN(0, 1);
4162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4165 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
4166 IEM_MC_ADVANCE_RIP();
4167 IEM_MC_END();
4168 return VINF_SUCCESS;
4169 }
4170 AssertFailedReturn(VERR_IEM_IPE_7);
4171}
4172
4173
4174/**
4175 * @opcode 0x8e
4176 */
4177FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4178{
4179 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4180
4181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4182
4183 /*
4184 * The practical operand size is 16-bit.
4185 */
4186#if 0 /* not necessary */
4187 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4188#endif
4189
4190 /*
4191 * Check that the destination register exists and can be used with this
4192 * instruction. The REX.R prefix is ignored.
4193 */
4194 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4195 if ( iSegReg == X86_SREG_CS
4196 || iSegReg > X86_SREG_GS)
4197 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4198
4199 /*
4200 * If rm is denoting a register, no more instruction bytes.
4201 */
4202 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4203 {
4204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4205 IEM_MC_BEGIN(2, 0);
4206 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4207 IEM_MC_ARG(uint16_t, u16Value, 1);
4208 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4209 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4210 IEM_MC_END();
4211 }
4212 else
4213 {
4214 /*
4215 * We're loading the register from memory. The access is word sized
4216 * regardless of operand size prefixes.
4217 */
4218 IEM_MC_BEGIN(2, 1);
4219 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4220 IEM_MC_ARG(uint16_t, u16Value, 1);
4221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4224 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4225 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4226 IEM_MC_END();
4227 }
4228 return VINF_SUCCESS;
4229}
4230
4231
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument is the
       number of bytes rSP is biased by (the operand size). */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Pop into a temporary
       rSP copy first so nothing is committed if the store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Commit rSP and advance RIP only on full success. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4326
4327
4328/**
4329 * @opcode 0x8f
4330 */
4331FNIEMOP_DEF(iemOp_Grp1A__xop)
4332{
4333 /*
4334 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4335 * three byte VEX prefix, except that the mmmmm field cannot have the values
4336 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4337 */
4338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4339 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4340 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4341
4342 IEMOP_MNEMONIC(xop, "xop");
4343 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4344 {
4345 /** @todo Test when exctly the XOP conformance checks kick in during
4346 * instruction decoding and fetching (using \#PF). */
4347 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4348 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4349 if ( ( pVCpu->iem.s.fPrefixes
4350 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4351 == 0)
4352 {
4353 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4354 if (bXop2 & 0x80 /* XOP.W */)
4355 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4356 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
4357 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
4358 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
4359 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4360 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4361 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4362
4363 /** @todo XOP: Just use new tables and decoders. */
4364 switch (bRm & 0x1f)
4365 {
4366 case 8: /* xop opcode map 8. */
4367 IEMOP_BITCH_ABOUT_STUB();
4368 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4369
4370 case 9: /* xop opcode map 9. */
4371 IEMOP_BITCH_ABOUT_STUB();
4372 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4373
4374 case 10: /* xop opcode map 10. */
4375 IEMOP_BITCH_ABOUT_STUB();
4376 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4377
4378 default:
4379 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4380 return IEMOP_RAISE_INVALID_OPCODE();
4381 }
4382 }
4383 else
4384 Log(("XOP: Invalid prefix mix!\n"));
4385 }
4386 else
4387 Log(("XOP: XOP support disabled!\n"));
4388 return IEMOP_RAISE_INVALID_OPCODE();
4389}
4390
4391
4392/**
4393 * Common 'xchg reg,rAX' helper.
4394 */
4395FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4396{
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398
4399 iReg |= pVCpu->iem.s.uRexB;
4400 switch (pVCpu->iem.s.enmEffOpSize)
4401 {
4402 case IEMMODE_16BIT:
4403 IEM_MC_BEGIN(0, 2);
4404 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4405 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4406 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4407 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4408 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4409 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4410 IEM_MC_ADVANCE_RIP();
4411 IEM_MC_END();
4412 return VINF_SUCCESS;
4413
4414 case IEMMODE_32BIT:
4415 IEM_MC_BEGIN(0, 2);
4416 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4417 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4418 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4419 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4420 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4421 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4422 IEM_MC_ADVANCE_RIP();
4423 IEM_MC_END();
4424 return VINF_SUCCESS;
4425
4426 case IEMMODE_64BIT:
4427 IEM_MC_BEGIN(0, 2);
4428 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4429 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4430 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4431 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4432 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4433 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4434 IEM_MC_ADVANCE_RIP();
4435 IEM_MC_END();
4436 return VINF_SUCCESS;
4437
4438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4439 }
4440}
4441
4442
4443/**
4444 * @opcode 0x90
4445 */
4446FNIEMOP_DEF(iemOp_nop)
4447{
4448 /* R8/R8D and RAX/EAX can be exchanged. */
4449 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4450 {
4451 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4452 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4453 }
4454
4455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4456 IEMOP_MNEMONIC(pause, "pause");
4457 else
4458 IEMOP_MNEMONIC(nop, "nop");
4459 IEM_MC_BEGIN(0, 0);
4460 IEM_MC_ADVANCE_RIP();
4461 IEM_MC_END();
4462 return VINF_SUCCESS;
4463}
4464
4465
4466/**
4467 * @opcode 0x91
4468 */
4469FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4470{
4471 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4472 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4473}
4474
4475
4476/**
4477 * @opcode 0x92
4478 */
4479FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4480{
4481 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4482 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4483}
4484
4485
4486/**
4487 * @opcode 0x93
4488 */
4489FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4490{
4491 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4492 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4493}
4494
4495
4496/**
4497 * @opcode 0x94
4498 */
4499FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4500{
4501 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4502 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4503}
4504
4505
4506/**
4507 * @opcode 0x95
4508 */
4509FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4510{
4511 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4512 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4513}
4514
4515
4516/**
4517 * @opcode 0x96
4518 */
4519FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4520{
4521 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4522 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4523}
4524
4525
4526/**
4527 * @opcode 0x97
4528 */
4529FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4530{
4531 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4532 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4533}
4534
4535
4536/**
4537 * @opcode 0x98
4538 */
4539FNIEMOP_DEF(iemOp_cbw)
4540{
4541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4542 switch (pVCpu->iem.s.enmEffOpSize)
4543 {
4544 case IEMMODE_16BIT:
4545 IEMOP_MNEMONIC(cbw, "cbw");
4546 IEM_MC_BEGIN(0, 1);
4547 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4548 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4549 } IEM_MC_ELSE() {
4550 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4551 } IEM_MC_ENDIF();
4552 IEM_MC_ADVANCE_RIP();
4553 IEM_MC_END();
4554 return VINF_SUCCESS;
4555
4556 case IEMMODE_32BIT:
4557 IEMOP_MNEMONIC(cwde, "cwde");
4558 IEM_MC_BEGIN(0, 1);
4559 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4560 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4561 } IEM_MC_ELSE() {
4562 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4563 } IEM_MC_ENDIF();
4564 IEM_MC_ADVANCE_RIP();
4565 IEM_MC_END();
4566 return VINF_SUCCESS;
4567
4568 case IEMMODE_64BIT:
4569 IEMOP_MNEMONIC(cdqe, "cdqe");
4570 IEM_MC_BEGIN(0, 1);
4571 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4572 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4573 } IEM_MC_ELSE() {
4574 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4575 } IEM_MC_ENDIF();
4576 IEM_MC_ADVANCE_RIP();
4577 IEM_MC_END();
4578 return VINF_SUCCESS;
4579
4580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4581 }
4582}
4583
4584
4585/**
4586 * @opcode 0x99
4587 */
4588FNIEMOP_DEF(iemOp_cwd)
4589{
4590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4591 switch (pVCpu->iem.s.enmEffOpSize)
4592 {
4593 case IEMMODE_16BIT:
4594 IEMOP_MNEMONIC(cwd, "cwd");
4595 IEM_MC_BEGIN(0, 1);
4596 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4597 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4598 } IEM_MC_ELSE() {
4599 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4600 } IEM_MC_ENDIF();
4601 IEM_MC_ADVANCE_RIP();
4602 IEM_MC_END();
4603 return VINF_SUCCESS;
4604
4605 case IEMMODE_32BIT:
4606 IEMOP_MNEMONIC(cdq, "cdq");
4607 IEM_MC_BEGIN(0, 1);
4608 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4609 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4610 } IEM_MC_ELSE() {
4611 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4612 } IEM_MC_ENDIF();
4613 IEM_MC_ADVANCE_RIP();
4614 IEM_MC_END();
4615 return VINF_SUCCESS;
4616
4617 case IEMMODE_64BIT:
4618 IEMOP_MNEMONIC(cqo, "cqo");
4619 IEM_MC_BEGIN(0, 1);
4620 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4621 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4622 } IEM_MC_ELSE() {
4623 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4624 } IEM_MC_ENDIF();
4625 IEM_MC_ADVANCE_RIP();
4626 IEM_MC_END();
4627 return VINF_SUCCESS;
4628
4629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4630 }
4631}
4632
4633
4634/**
4635 * @opcode 0x9a
4636 */
4637FNIEMOP_DEF(iemOp_call_Ap)
4638{
4639 IEMOP_MNEMONIC(call_Ap, "call Ap");
4640 IEMOP_HLP_NO_64BIT();
4641
4642 /* Decode the far pointer address and pass it on to the far call C implementation. */
4643 uint32_t offSeg;
4644 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4645 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4646 else
4647 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4648 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4651}
4652
4653
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* No operation beyond checking for the wait-flavor \#NM condition and
       any pending FPU exceptions. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4667
4668
4669/**
4670 * @opcode 0x9c
4671 */
4672FNIEMOP_DEF(iemOp_pushf_Fv)
4673{
4674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4675 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4676 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4677}
4678
4679
4680/**
4681 * @opcode 0x9d
4682 */
4683FNIEMOP_DEF(iemOp_popf_Fv)
4684{
4685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4686 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4687 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4688}
4689
4690
4691/**
4692 * @opcode 0x9e
4693 */
4694FNIEMOP_DEF(iemOp_sahf)
4695{
4696 IEMOP_MNEMONIC(sahf, "sahf");
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4699 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4700 return IEMOP_RAISE_INVALID_OPCODE();
4701 IEM_MC_BEGIN(0, 2);
4702 IEM_MC_LOCAL(uint32_t, u32Flags);
4703 IEM_MC_LOCAL(uint32_t, EFlags);
4704 IEM_MC_FETCH_EFLAGS(EFlags);
4705 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4706 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4707 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4708 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4709 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4710 IEM_MC_COMMIT_EFLAGS(EFlags);
4711 IEM_MC_ADVANCE_RIP();
4712 IEM_MC_END();
4713 return VINF_SUCCESS;
4714}
4715
4716
4717/**
4718 * @opcode 0x9f
4719 */
4720FNIEMOP_DEF(iemOp_lahf)
4721{
4722 IEMOP_MNEMONIC(lahf, "lahf");
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4724 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4725 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4726 return IEMOP_RAISE_INVALID_OPCODE();
4727 IEM_MC_BEGIN(0, 1);
4728 IEM_MC_LOCAL(uint8_t, u8Flags);
4729 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4730 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4731 IEM_MC_ADVANCE_RIP();
4732 IEM_MC_END();
4733 return VINF_SUCCESS;
4734}
4735
4736
4737/**
4738 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4739 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4740 * prefixes. Will return on failures.
4741 * @param a_GCPtrMemOff The variable to store the offset in.
4742 */
4743#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4744 do \
4745 { \
4746 switch (pVCpu->iem.s.enmEffAddrMode) \
4747 { \
4748 case IEMMODE_16BIT: \
4749 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4750 break; \
4751 case IEMMODE_32BIT: \
4752 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4753 break; \
4754 case IEMMODE_64BIT: \
4755 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4756 break; \
4757 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4758 } \
4759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4760 } while (0)
4761
4762/**
4763 * @opcode 0xa0
4764 */
4765FNIEMOP_DEF(iemOp_mov_AL_Ob)
4766{
4767 /*
4768 * Get the offset and fend of lock prefixes.
4769 */
4770 RTGCPTR GCPtrMemOff;
4771 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4772
4773 /*
4774 * Fetch AL.
4775 */
4776 IEM_MC_BEGIN(0,1);
4777 IEM_MC_LOCAL(uint8_t, u8Tmp);
4778 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4779 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4780 IEM_MC_ADVANCE_RIP();
4781 IEM_MC_END();
4782 return VINF_SUCCESS;
4783}
4784
4785
4786/**
4787 * @opcode 0xa1
4788 */
4789FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4790{
4791 /*
4792 * Get the offset and fend of lock prefixes.
4793 */
4794 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4795 RTGCPTR GCPtrMemOff;
4796 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4797
4798 /*
4799 * Fetch rAX.
4800 */
4801 switch (pVCpu->iem.s.enmEffOpSize)
4802 {
4803 case IEMMODE_16BIT:
4804 IEM_MC_BEGIN(0,1);
4805 IEM_MC_LOCAL(uint16_t, u16Tmp);
4806 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4807 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4808 IEM_MC_ADVANCE_RIP();
4809 IEM_MC_END();
4810 return VINF_SUCCESS;
4811
4812 case IEMMODE_32BIT:
4813 IEM_MC_BEGIN(0,1);
4814 IEM_MC_LOCAL(uint32_t, u32Tmp);
4815 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4816 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4817 IEM_MC_ADVANCE_RIP();
4818 IEM_MC_END();
4819 return VINF_SUCCESS;
4820
4821 case IEMMODE_64BIT:
4822 IEM_MC_BEGIN(0,1);
4823 IEM_MC_LOCAL(uint64_t, u64Tmp);
4824 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4825 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4826 IEM_MC_ADVANCE_RIP();
4827 IEM_MC_END();
4828 return VINF_SUCCESS;
4829
4830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4831 }
4832}
4833
4834
4835/**
4836 * @opcode 0xa2
4837 */
4838FNIEMOP_DEF(iemOp_mov_Ob_AL)
4839{
4840 /*
4841 * Get the offset and fend of lock prefixes.
4842 */
4843 RTGCPTR GCPtrMemOff;
4844 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4845
4846 /*
4847 * Store AL.
4848 */
4849 IEM_MC_BEGIN(0,1);
4850 IEM_MC_LOCAL(uint8_t, u8Tmp);
4851 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4852 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4853 IEM_MC_ADVANCE_RIP();
4854 IEM_MC_END();
4855 return VINF_SUCCESS;
4856}
4857
4858
4859/**
4860 * @opcode 0xa3
4861 */
4862FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4863{
4864 /*
4865 * Get the offset and fend of lock prefixes.
4866 */
4867 RTGCPTR GCPtrMemOff;
4868 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4869
4870 /*
4871 * Store rAX.
4872 */
4873 switch (pVCpu->iem.s.enmEffOpSize)
4874 {
4875 case IEMMODE_16BIT:
4876 IEM_MC_BEGIN(0,1);
4877 IEM_MC_LOCAL(uint16_t, u16Tmp);
4878 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4879 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_32BIT:
4885 IEM_MC_BEGIN(0,1);
4886 IEM_MC_LOCAL(uint32_t, u32Tmp);
4887 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4888 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4889 IEM_MC_ADVANCE_RIP();
4890 IEM_MC_END();
4891 return VINF_SUCCESS;
4892
4893 case IEMMODE_64BIT:
4894 IEM_MC_BEGIN(0,1);
4895 IEM_MC_LOCAL(uint64_t, u64Tmp);
4896 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4897 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4898 IEM_MC_ADVANCE_RIP();
4899 IEM_MC_END();
4900 return VINF_SUCCESS;
4901
4902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4903 }
4904}
4905
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep MOVS step: load ValBits from DS(or override):rSI, store
 * to ES:rDI, then advance or retreat both index registers by the element
 * size depending on EFLAGS.DF.  AddrBits selects the index register width. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4924
4925/**
4926 * @opcode 0xa4
4927 */
4928FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4929{
4930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4931
4932 /*
4933 * Use the C implementation if a repeat prefix is encountered.
4934 */
4935 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4936 {
4937 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4938 switch (pVCpu->iem.s.enmEffAddrMode)
4939 {
4940 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4941 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4942 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4944 }
4945 }
4946 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4947
4948 /*
4949 * Sharing case implementation with movs[wdq] below.
4950 */
4951 switch (pVCpu->iem.s.enmEffAddrMode)
4952 {
4953 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4954 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4955 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4957 }
4958 return VINF_SUCCESS;
4959}
4960
4961
4962/**
4963 * @opcode 0xa5
4964 */
4965FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4966{
4967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4968
4969 /*
4970 * Use the C implementation if a repeat prefix is encountered.
4971 */
4972 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4973 {
4974 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4975 switch (pVCpu->iem.s.enmEffOpSize)
4976 {
4977 case IEMMODE_16BIT:
4978 switch (pVCpu->iem.s.enmEffAddrMode)
4979 {
4980 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4981 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4982 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4984 }
4985 break;
4986 case IEMMODE_32BIT:
4987 switch (pVCpu->iem.s.enmEffAddrMode)
4988 {
4989 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4990 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4991 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4993 }
4994 case IEMMODE_64BIT:
4995 switch (pVCpu->iem.s.enmEffAddrMode)
4996 {
4997 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4998 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4999 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5001 }
5002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5003 }
5004 }
5005 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5006
5007 /*
5008 * Annoying double switch here.
5009 * Using ugly macro for implementing the cases, sharing it with movsb.
5010 */
5011 switch (pVCpu->iem.s.enmEffOpSize)
5012 {
5013 case IEMMODE_16BIT:
5014 switch (pVCpu->iem.s.enmEffAddrMode)
5015 {
5016 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5017 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5018 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5020 }
5021 break;
5022
5023 case IEMMODE_32BIT:
5024 switch (pVCpu->iem.s.enmEffAddrMode)
5025 {
5026 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5027 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5028 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5030 }
5031 break;
5032
5033 case IEMMODE_64BIT:
5034 switch (pVCpu->iem.s.enmEffAddrMode)
5035 {
5036 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5037 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5038 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5040 }
5041 break;
5042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5043 }
5044 return VINF_SUCCESS;
5045}
5046
5047#undef IEM_MOVS_CASE
5048
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep CMPS step: load ValBits from DS(or override):rSI and
 * ES:rDI, compare them via the cmp assembly worker (updating EFLAGS), then
 * advance or retreat both index registers by the element size depending on
 * EFLAGS.DF.  AddrBits selects the index register width. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

5076/**
5077 * @opcode 0xa6
5078 */
5079FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
5080{
5081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5082
5083 /*
5084 * Use the C implementation if a repeat prefix is encountered.
5085 */
5086 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5087 {
5088 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
5089 switch (pVCpu->iem.s.enmEffAddrMode)
5090 {
5091 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5092 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5093 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5095 }
5096 }
5097 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5098 {
5099 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
5100 switch (pVCpu->iem.s.enmEffAddrMode)
5101 {
5102 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5103 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5104 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5106 }
5107 }
5108 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
5109
5110 /*
5111 * Sharing case implementation with cmps[wdq] below.
5112 */
5113 switch (pVCpu->iem.s.enmEffAddrMode)
5114 {
5115 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
5116 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
5117 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
5118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5119 }
5120 return VINF_SUCCESS;
5121
5122}
5123
5124
5125/**
5126 * @opcode 0xa7
5127 */
5128FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5129{
5130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5131
5132 /*
5133 * Use the C implementation if a repeat prefix is encountered.
5134 */
5135 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5136 {
5137 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5138 switch (pVCpu->iem.s.enmEffOpSize)
5139 {
5140 case IEMMODE_16BIT:
5141 switch (pVCpu->iem.s.enmEffAddrMode)
5142 {
5143 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5144 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5145 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5147 }
5148 break;
5149 case IEMMODE_32BIT:
5150 switch (pVCpu->iem.s.enmEffAddrMode)
5151 {
5152 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5153 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5154 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5156 }
5157 case IEMMODE_64BIT:
5158 switch (pVCpu->iem.s.enmEffAddrMode)
5159 {
5160 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5161 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5162 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5164 }
5165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5166 }
5167 }
5168
5169 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5170 {
5171 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5172 switch (pVCpu->iem.s.enmEffOpSize)
5173 {
5174 case IEMMODE_16BIT:
5175 switch (pVCpu->iem.s.enmEffAddrMode)
5176 {
5177 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5178 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5179 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5181 }
5182 break;
5183 case IEMMODE_32BIT:
5184 switch (pVCpu->iem.s.enmEffAddrMode)
5185 {
5186 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5187 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5188 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5190 }
5191 case IEMMODE_64BIT:
5192 switch (pVCpu->iem.s.enmEffAddrMode)
5193 {
5194 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5195 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5196 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5198 }
5199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5200 }
5201 }
5202
5203 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5204
5205 /*
5206 * Annoying double switch here.
5207 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5208 */
5209 switch (pVCpu->iem.s.enmEffOpSize)
5210 {
5211 case IEMMODE_16BIT:
5212 switch (pVCpu->iem.s.enmEffAddrMode)
5213 {
5214 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5215 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5216 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5218 }
5219 break;
5220
5221 case IEMMODE_32BIT:
5222 switch (pVCpu->iem.s.enmEffAddrMode)
5223 {
5224 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5225 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5226 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5228 }
5229 break;
5230
5231 case IEMMODE_64BIT:
5232 switch (pVCpu->iem.s.enmEffAddrMode)
5233 {
5234 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5235 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5236 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5238 }
5239 break;
5240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5241 }
5242 return VINF_SUCCESS;
5243
5244}
5245
5246#undef IEM_CMPS_CASE
5247
5248/**
5249 * @opcode 0xa8
5250 */
5251FNIEMOP_DEF(iemOp_test_AL_Ib)
5252{
5253 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5254 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5255 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5256}
5257
5258
5259/**
5260 * @opcode 0xa9
5261 */
5262FNIEMOP_DEF(iemOp_test_eAX_Iz)
5263{
5264 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5265 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5266 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5267}
5268
5269
5270/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
5271#define IEM_STOS_CASE(ValBits, AddrBits) \
5272 IEM_MC_BEGIN(0, 2); \
5273 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
5274 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5275 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
5276 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
5277 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
5278 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5279 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5280 } IEM_MC_ELSE() { \
5281 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5282 } IEM_MC_ENDIF(); \
5283 IEM_MC_ADVANCE_RIP(); \
5284 IEM_MC_END(); \
5285
5286/**
5287 * @opcode 0xaa
5288 */
5289FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5290{
5291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5292
5293 /*
5294 * Use the C implementation if a repeat prefix is encountered.
5295 */
5296 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5297 {
5298 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5299 switch (pVCpu->iem.s.enmEffAddrMode)
5300 {
5301 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5302 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5303 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5305 }
5306 }
5307 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5308
5309 /*
5310 * Sharing case implementation with stos[wdq] below.
5311 */
5312 switch (pVCpu->iem.s.enmEffAddrMode)
5313 {
5314 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5315 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5316 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5318 }
5319 return VINF_SUCCESS;
5320}
5321
5322
5323/**
5324 * @opcode 0xab
5325 */
5326FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5327{
5328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5329
5330 /*
5331 * Use the C implementation if a repeat prefix is encountered.
5332 */
5333 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5334 {
5335 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5336 switch (pVCpu->iem.s.enmEffOpSize)
5337 {
5338 case IEMMODE_16BIT:
5339 switch (pVCpu->iem.s.enmEffAddrMode)
5340 {
5341 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5342 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5343 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5345 }
5346 break;
5347 case IEMMODE_32BIT:
5348 switch (pVCpu->iem.s.enmEffAddrMode)
5349 {
5350 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5351 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5352 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5354 }
5355 case IEMMODE_64BIT:
5356 switch (pVCpu->iem.s.enmEffAddrMode)
5357 {
5358 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5359 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5360 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5362 }
5363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5364 }
5365 }
5366 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5367
5368 /*
5369 * Annoying double switch here.
5370 * Using ugly macro for implementing the cases, sharing it with stosb.
5371 */
5372 switch (pVCpu->iem.s.enmEffOpSize)
5373 {
5374 case IEMMODE_16BIT:
5375 switch (pVCpu->iem.s.enmEffAddrMode)
5376 {
5377 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5378 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5379 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5381 }
5382 break;
5383
5384 case IEMMODE_32BIT:
5385 switch (pVCpu->iem.s.enmEffAddrMode)
5386 {
5387 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5388 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5389 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5391 }
5392 break;
5393
5394 case IEMMODE_64BIT:
5395 switch (pVCpu->iem.s.enmEffAddrMode)
5396 {
5397 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5398 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5399 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5401 }
5402 break;
5403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5404 }
5405 return VINF_SUCCESS;
5406}
5407
5408#undef IEM_STOS_CASE
5409
5410/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
5411#define IEM_LODS_CASE(ValBits, AddrBits) \
5412 IEM_MC_BEGIN(0, 2); \
5413 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
5414 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5415 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
5416 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
5417 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
5418 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5419 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5420 } IEM_MC_ELSE() { \
5421 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5422 } IEM_MC_ENDIF(); \
5423 IEM_MC_ADVANCE_RIP(); \
5424 IEM_MC_END();
5425
5426/**
5427 * @opcode 0xac
5428 */
5429FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5430{
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432
5433 /*
5434 * Use the C implementation if a repeat prefix is encountered.
5435 */
5436 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5437 {
5438 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5439 switch (pVCpu->iem.s.enmEffAddrMode)
5440 {
5441 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5442 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5443 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5445 }
5446 }
5447 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5448
5449 /*
5450 * Sharing case implementation with stos[wdq] below.
5451 */
5452 switch (pVCpu->iem.s.enmEffAddrMode)
5453 {
5454 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5455 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5456 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5458 }
5459 return VINF_SUCCESS;
5460}
5461
5462
5463/**
5464 * @opcode 0xad
5465 */
5466FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5467{
5468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5469
5470 /*
5471 * Use the C implementation if a repeat prefix is encountered.
5472 */
5473 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5474 {
5475 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5476 switch (pVCpu->iem.s.enmEffOpSize)
5477 {
5478 case IEMMODE_16BIT:
5479 switch (pVCpu->iem.s.enmEffAddrMode)
5480 {
5481 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5482 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5483 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5485 }
5486 break;
5487 case IEMMODE_32BIT:
5488 switch (pVCpu->iem.s.enmEffAddrMode)
5489 {
5490 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5491 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5492 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5494 }
5495 case IEMMODE_64BIT:
5496 switch (pVCpu->iem.s.enmEffAddrMode)
5497 {
5498 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5499 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5500 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5502 }
5503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5504 }
5505 }
5506 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5507
5508 /*
5509 * Annoying double switch here.
5510 * Using ugly macro for implementing the cases, sharing it with lodsb.
5511 */
5512 switch (pVCpu->iem.s.enmEffOpSize)
5513 {
5514 case IEMMODE_16BIT:
5515 switch (pVCpu->iem.s.enmEffAddrMode)
5516 {
5517 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5518 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5519 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5521 }
5522 break;
5523
5524 case IEMMODE_32BIT:
5525 switch (pVCpu->iem.s.enmEffAddrMode)
5526 {
5527 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5528 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5529 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5531 }
5532 break;
5533
5534 case IEMMODE_64BIT:
5535 switch (pVCpu->iem.s.enmEffAddrMode)
5536 {
5537 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5538 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5539 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5541 }
5542 break;
5543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5544 }
5545 return VINF_SUCCESS;
5546}
5547
5548#undef IEM_LODS_CASE
5549
5550/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
5551#define IEM_SCAS_CASE(ValBits, AddrBits) \
5552 IEM_MC_BEGIN(3, 2); \
5553 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
5554 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
5555 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5556 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5557 \
5558 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
5559 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
5560 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
5561 IEM_MC_REF_EFLAGS(pEFlags); \
5562 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
5563 \
5564 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5565 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5566 } IEM_MC_ELSE() { \
5567 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5568 } IEM_MC_ENDIF(); \
5569 IEM_MC_ADVANCE_RIP(); \
5570 IEM_MC_END();
5571
5572/**
5573 * @opcode 0xae
5574 */
5575FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5576{
5577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5578
5579 /*
5580 * Use the C implementation if a repeat prefix is encountered.
5581 */
5582 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5583 {
5584 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5585 switch (pVCpu->iem.s.enmEffAddrMode)
5586 {
5587 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5588 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5589 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5591 }
5592 }
5593 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5594 {
5595 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5596 switch (pVCpu->iem.s.enmEffAddrMode)
5597 {
5598 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5599 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5600 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5602 }
5603 }
5604 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5605
5606 /*
5607 * Sharing case implementation with stos[wdq] below.
5608 */
5609 switch (pVCpu->iem.s.enmEffAddrMode)
5610 {
5611 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5612 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5613 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5615 }
5616 return VINF_SUCCESS;
5617}
5618
5619
5620/**
5621 * @opcode 0xaf
5622 */
5623FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5624{
5625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5626
5627 /*
5628 * Use the C implementation if a repeat prefix is encountered.
5629 */
5630 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5631 {
5632 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5633 switch (pVCpu->iem.s.enmEffOpSize)
5634 {
5635 case IEMMODE_16BIT:
5636 switch (pVCpu->iem.s.enmEffAddrMode)
5637 {
5638 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5639 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5640 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5642 }
5643 break;
5644 case IEMMODE_32BIT:
5645 switch (pVCpu->iem.s.enmEffAddrMode)
5646 {
5647 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5648 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5649 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5651 }
5652 case IEMMODE_64BIT:
5653 switch (pVCpu->iem.s.enmEffAddrMode)
5654 {
5655 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5656 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5657 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5658 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5659 }
5660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5661 }
5662 }
5663 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5664 {
5665 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5666 switch (pVCpu->iem.s.enmEffOpSize)
5667 {
5668 case IEMMODE_16BIT:
5669 switch (pVCpu->iem.s.enmEffAddrMode)
5670 {
5671 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5672 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5673 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5675 }
5676 break;
5677 case IEMMODE_32BIT:
5678 switch (pVCpu->iem.s.enmEffAddrMode)
5679 {
5680 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5681 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5682 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5684 }
5685 case IEMMODE_64BIT:
5686 switch (pVCpu->iem.s.enmEffAddrMode)
5687 {
5688 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5689 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5690 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5692 }
5693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5694 }
5695 }
5696 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5697
5698 /*
5699 * Annoying double switch here.
5700 * Using ugly macro for implementing the cases, sharing it with scasb.
5701 */
5702 switch (pVCpu->iem.s.enmEffOpSize)
5703 {
5704 case IEMMODE_16BIT:
5705 switch (pVCpu->iem.s.enmEffAddrMode)
5706 {
5707 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5708 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5709 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5711 }
5712 break;
5713
5714 case IEMMODE_32BIT:
5715 switch (pVCpu->iem.s.enmEffAddrMode)
5716 {
5717 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5718 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5719 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5721 }
5722 break;
5723
5724 case IEMMODE_64BIT:
5725 switch (pVCpu->iem.s.enmEffAddrMode)
5726 {
5727 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5728 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5729 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5731 }
5732 break;
5733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5734 }
5735 return VINF_SUCCESS;
5736}
5737
5738#undef IEM_SCAS_CASE
5739
5740/**
5741 * Common 'mov r8, imm8' helper.
5742 */
5743FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5744{
5745 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5747
5748 IEM_MC_BEGIN(0, 1);
5749 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5750 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5751 IEM_MC_ADVANCE_RIP();
5752 IEM_MC_END();
5753
5754 return VINF_SUCCESS;
5755}
5756
5757
5758/**
5759 * @opcode 0xb0
5760 */
5761FNIEMOP_DEF(iemOp_mov_AL_Ib)
5762{
5763 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5764 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5765}
5766
5767
5768/**
5769 * @opcode 0xb1
5770 */
5771FNIEMOP_DEF(iemOp_CL_Ib)
5772{
5773 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5774 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5775}
5776
5777
5778/**
5779 * @opcode 0xb2
5780 */
5781FNIEMOP_DEF(iemOp_DL_Ib)
5782{
5783 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5784 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5785}
5786
5787
5788/**
5789 * @opcode 0xb3
5790 */
5791FNIEMOP_DEF(iemOp_BL_Ib)
5792{
5793 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5794 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5795}
5796
5797
5798/**
5799 * @opcode 0xb4
5800 */
5801FNIEMOP_DEF(iemOp_mov_AH_Ib)
5802{
5803 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5804 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5805}
5806
5807
5808/**
5809 * @opcode 0xb5
5810 */
5811FNIEMOP_DEF(iemOp_CH_Ib)
5812{
5813 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5814 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5815}
5816
5817
5818/**
5819 * @opcode 0xb6
5820 */
5821FNIEMOP_DEF(iemOp_DH_Ib)
5822{
5823 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5824 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5825}
5826
5827
5828/**
5829 * @opcode 0xb7
5830 */
5831FNIEMOP_DEF(iemOp_BH_Ib)
5832{
5833 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5834 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5835}
5836
5837
5838/**
5839 * Common 'mov regX,immX' helper.
5840 */
5841FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5842{
5843 switch (pVCpu->iem.s.enmEffOpSize)
5844 {
5845 case IEMMODE_16BIT:
5846 {
5847 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5849
5850 IEM_MC_BEGIN(0, 1);
5851 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5852 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5853 IEM_MC_ADVANCE_RIP();
5854 IEM_MC_END();
5855 break;
5856 }
5857
5858 case IEMMODE_32BIT:
5859 {
5860 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5862
5863 IEM_MC_BEGIN(0, 1);
5864 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5865 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5866 IEM_MC_ADVANCE_RIP();
5867 IEM_MC_END();
5868 break;
5869 }
5870 case IEMMODE_64BIT:
5871 {
5872 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5874
5875 IEM_MC_BEGIN(0, 1);
5876 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5877 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5878 IEM_MC_ADVANCE_RIP();
5879 IEM_MC_END();
5880 break;
5881 }
5882 }
5883
5884 return VINF_SUCCESS;
5885}
5886
5887
5888/**
5889 * @opcode 0xb8
5890 */
5891FNIEMOP_DEF(iemOp_eAX_Iv)
5892{
5893 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5894 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5895}
5896
5897
5898/**
5899 * @opcode 0xb9
5900 */
5901FNIEMOP_DEF(iemOp_eCX_Iv)
5902{
5903 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5904 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5905}
5906
5907
5908/**
5909 * @opcode 0xba
5910 */
5911FNIEMOP_DEF(iemOp_eDX_Iv)
5912{
5913 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5914 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5915}
5916
5917
5918/**
5919 * @opcode 0xbb
5920 */
5921FNIEMOP_DEF(iemOp_eBX_Iv)
5922{
5923 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5924 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5925}
5926
5927
5928/**
5929 * @opcode 0xbc
5930 */
5931FNIEMOP_DEF(iemOp_eSP_Iv)
5932{
5933 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5934 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5935}
5936
5937
5938/**
5939 * @opcode 0xbd
5940 */
5941FNIEMOP_DEF(iemOp_eBP_Iv)
5942{
5943 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5944 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5945}
5946
5947
5948/**
5949 * @opcode 0xbe
5950 */
5951FNIEMOP_DEF(iemOp_eSI_Iv)
5952{
5953 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5954 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5955}
5956
5957
5958/**
5959 * @opcode 0xbf
5960 */
5961FNIEMOP_DEF(iemOp_eDI_Iv)
5962{
5963 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5964 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5965}
5966
5967
5968/**
5969 * @opcode 0xc0
5970 */
5971FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5972{
5973 IEMOP_HLP_MIN_186();
5974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5975 PCIEMOPSHIFTSIZES pImpl;
5976 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5977 {
5978 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5979 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5980 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5981 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5982 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5983 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5984 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5985 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5986 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5987 }
5988 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5989
5990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5991 {
5992 /* register */
5993 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5995 IEM_MC_BEGIN(3, 0);
5996 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5997 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5998 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5999 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6000 IEM_MC_REF_EFLAGS(pEFlags);
6001 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6002 IEM_MC_ADVANCE_RIP();
6003 IEM_MC_END();
6004 }
6005 else
6006 {
6007 /* memory */
6008 IEM_MC_BEGIN(3, 2);
6009 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6010 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6011 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6013
6014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6015 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6016 IEM_MC_ASSIGN(cShiftArg, cShift);
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6019 IEM_MC_FETCH_EFLAGS(EFlags);
6020 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6021
6022 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6023 IEM_MC_COMMIT_EFLAGS(EFlags);
6024 IEM_MC_ADVANCE_RIP();
6025 IEM_MC_END();
6026 }
6027 return VINF_SUCCESS;
6028}
6029
6030
6031/**
6032 * @opcode 0xc1
6033 */
6034FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6035{
6036 IEMOP_HLP_MIN_186();
6037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6038 PCIEMOPSHIFTSIZES pImpl;
6039 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6040 {
6041 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6042 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6043 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6044 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6045 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6046 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6047 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6048 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6049 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6050 }
6051 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6052
6053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6054 {
6055 /* register */
6056 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6058 switch (pVCpu->iem.s.enmEffOpSize)
6059 {
6060 case IEMMODE_16BIT:
6061 IEM_MC_BEGIN(3, 0);
6062 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6063 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6064 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6065 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6066 IEM_MC_REF_EFLAGS(pEFlags);
6067 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6068 IEM_MC_ADVANCE_RIP();
6069 IEM_MC_END();
6070 return VINF_SUCCESS;
6071
6072 case IEMMODE_32BIT:
6073 IEM_MC_BEGIN(3, 0);
6074 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6075 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6076 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6077 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6078 IEM_MC_REF_EFLAGS(pEFlags);
6079 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6080 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6081 IEM_MC_ADVANCE_RIP();
6082 IEM_MC_END();
6083 return VINF_SUCCESS;
6084
6085 case IEMMODE_64BIT:
6086 IEM_MC_BEGIN(3, 0);
6087 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6088 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6090 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6091 IEM_MC_REF_EFLAGS(pEFlags);
6092 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6093 IEM_MC_ADVANCE_RIP();
6094 IEM_MC_END();
6095 return VINF_SUCCESS;
6096
6097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6098 }
6099 }
6100 else
6101 {
6102 /* memory */
6103 switch (pVCpu->iem.s.enmEffOpSize)
6104 {
6105 case IEMMODE_16BIT:
6106 IEM_MC_BEGIN(3, 2);
6107 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6108 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6109 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6111
6112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6113 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6114 IEM_MC_ASSIGN(cShiftArg, cShift);
6115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6116 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6117 IEM_MC_FETCH_EFLAGS(EFlags);
6118 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6119
6120 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6121 IEM_MC_COMMIT_EFLAGS(EFlags);
6122 IEM_MC_ADVANCE_RIP();
6123 IEM_MC_END();
6124 return VINF_SUCCESS;
6125
6126 case IEMMODE_32BIT:
6127 IEM_MC_BEGIN(3, 2);
6128 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6129 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6130 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6132
6133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6134 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6135 IEM_MC_ASSIGN(cShiftArg, cShift);
6136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6137 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6138 IEM_MC_FETCH_EFLAGS(EFlags);
6139 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6140
6141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6142 IEM_MC_COMMIT_EFLAGS(EFlags);
6143 IEM_MC_ADVANCE_RIP();
6144 IEM_MC_END();
6145 return VINF_SUCCESS;
6146
6147 case IEMMODE_64BIT:
6148 IEM_MC_BEGIN(3, 2);
6149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6150 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6151 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6153
6154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6155 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6156 IEM_MC_ASSIGN(cShiftArg, cShift);
6157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6158 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6159 IEM_MC_FETCH_EFLAGS(EFlags);
6160 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6161
6162 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6163 IEM_MC_COMMIT_EFLAGS(EFlags);
6164 IEM_MC_ADVANCE_RIP();
6165 IEM_MC_END();
6166 return VINF_SUCCESS;
6167
6168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6169 }
6170 }
6171}
6172
6173
/**
 * @opcode 0xc2
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    /* Fetch the immediate (bytes to pop after the return address) before
       declaring decoding complete. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Near return defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6185
6186
/**
 * @opcode 0xc3
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* Near return defaults to 64-bit operand size in long mode.
       Same C implementation as 0xc2, just with zero bytes to pop. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
6197
6198
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3. (Note: this opcode is
       LES; 0xc5 is LDS.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
    {
        /* Three-byte VEX prefix form. */
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are
               done once the instruction is fully decoded. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (bVex2 & 0x80 /* VEX.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* VEX byte 1 carries the inverted REX.R/X/B bits; VEX byte 2
               carries vvvv (inverted 3rd register), L and pp. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of VEX byte 1 (mmmmm) select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy decoding: LES Gv,Mp loads ES and a general register from m16:16/32. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6268
6269
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3. (Note: this opcode is
       LDS; 0xc4 is LES.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Two-byte VEX prefix form; always selects opcode map 1 (0x0f). */
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are
               done once the instruction is fully decoded. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The single payload byte packs inverted R, inverted vvvv, L and pp. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy decoding: LDS Gv,Mp loads DS and a general register from m16:16/32. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6313
6314
/**
 * @opcode 0xc6
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* One immediate byte follows the ModR/M bytes (cbImm=1 for the
           effective address calculation). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6349
6350
/**
 * @opcode 0xc7
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form uses a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. The cbImm argument to the effective address
           calculation is the size of the trailing immediate (2 or 4). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6438
6439
6440
6441
/**
 * @opcode 0xc8
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Iw = frame size, Ib = nesting level; both decoded before handing off. */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6455
6456
/**
 * @opcode 0xc9
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6468
6469
/**
 * @opcode 0xca
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    /* Far return, popping u16Imm extra bytes; complex mode handling is
       deferred to the C implementation. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6481
6482
/**
 * @opcode 0xcb
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Same C implementation as 0xca, with zero bytes to pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6493
6494
/**
 * @opcode 0xcc
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Breakpoint: raises #BP, flagged as the one-byte INT3 form. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
6503
6504
/**
 * @opcode 0xcd
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    /* Software interrupt with vector from the immediate byte. */
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
6514
6515
/**
 * @opcode 0xce
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* INTO is invalid in 64-bit mode. */

    /* Defers to the common INT implementation with vector #OF; the OF-flag
       check is presumably inside iemCImpl_int for this vector. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6531
6532
/**
 * @opcode 0xcf
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6542
6543
/**
 * @opcode 0xd0
 *
 * Group 2: byte-sized rotate/shift by a constant count of 1.  The ModR/M
 * reg field selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read-write, run the worker, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6601
6602
6603
/**
 * @opcode 0xd1
 *
 * Group 2: word/dword/qword rotate/shift by a constant count of 1, with
 * per-operand-size register and memory variants.  /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read-write, run the worker, commit result and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6737
6738
/**
 * @opcode 0xd2
 *
 * Group 2: byte-sized rotate/shift with the count taken from CL.
 * /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: shift count is fetched from CL at execution time. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read-write, run the worker, commit result and flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6798
6799
/**
 * @opcode 0xd3
 *
 * Group 2: word/dword/qword rotate/shift with the count taken from CL,
 * with per-operand-size register and memory variants.  /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: shift count is fetched from CL at execution time. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read-write, run the worker, commit result and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6939
/**
 * @opcode 0xd4
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is invalid in 64-bit mode. */
    /* AAM divides by the immediate; a zero divisor raises #DE. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6953
6954
/**
 * @opcode 0xd5
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6966
6967
6968/**
6969 * @opcode 0xd6
6970 */
6971FNIEMOP_DEF(iemOp_salc)
6972{
6973 IEMOP_MNEMONIC(salc, "salc");
6974 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6975 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6977 IEMOP_HLP_NO_64BIT();
6978
6979 IEM_MC_BEGIN(0, 0);
6980 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6981 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6982 } IEM_MC_ELSE() {
6983 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6984 } IEM_MC_ENDIF();
6985 IEM_MC_ADVANCE_RIP();
6986 IEM_MC_END();
6987 return VINF_SUCCESS;
6988}
6989
6990
/**
 * @opcode 0xd7
 *
 * XLAT: AL = [eff-seg : xBX + zero-extended AL], with the address width
 * selected by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* 16-bit address wrap-around is handled by the MEM16 fetch variant. */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7039
7040
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only run the worker when both ST0 and STn hold values. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow targeting ST0 (register 0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7071
7072
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * @param   bRm         The ModR/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only run the worker when both ST0 and STn hold values; the worker
       returns the new FSW only (no register store). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* Underflow with no destination register (UINT8_MAX). */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7103
7104
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * @param   bRm         The ModR/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as the no-pop variant, but the stack is popped on both the
       normal and the underflow path. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7135
7136
/** Opcode 0xd8 11/0. FADD ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. FMUL ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. FCOM ST(0),ST(i) - compare, only FSW flags updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i) - compare, update FSW, then pop.
 * Note: shares the FCOM assembly worker; only the pop differs. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. FSUB ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i) - reversed subtraction, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. FDIV ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i) - reversed division, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7199
7200
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real memory operand is fetched before the FPU stack check; the
 * assembly worker receives ST0 and the m32r value and produces an
 * IEMFPURESULT which is stored back into ST0.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* ST0 is the (empty) destination. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7236
7237
/** Opcode 0xd8 !11/0. FADD ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7252
7253
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real - compare, only FSW flags updated.
 * Open-coded (rather than using a common worker) because only the FSW is
 * produced and the memory operand address is needed for the FPU DP on
 * underflow. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7286
7287
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real - compare, update FSW, then pop.
 * Identical to iemOp_fcom_m32r except for the _THEN_POP variants at the end. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7320
7321
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real - reversed subtraction, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real - reversed division, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7352
7353
/**
 * @opcode 0xd8
 *
 * First x87 escape byte. Dispatches on the ModR/M byte: mod=3 selects the
 * ST(0),ST(i) register forms, otherwise the m32real memory forms; the reg
 * field selects the operation in both cases.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for the benefit of FNSTENV & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7393
7394
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - converts the 32-bit real to 80-bit and pushes it onto the
 * FPU stack (becomes the new ST0). Pushing requires ST7 to be empty.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to the current top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7427
7428
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - stores ST0 to memory as 32-bit real. The destination is
 * memory-mapped for write first; on masked invalid-operation (FCW.IM set)
 * with an empty ST0, a negative QNaN is written instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7463
7464
/** Opcode 0xd9 !11/3
 * FSTP m32real - same as FST m32real but pops the stack afterwards
 * (uses the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7499
7500
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment from memory; the layout
 * (14 vs 28 bytes) depends on the effective operand size, which is passed
 * to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7518
7519
7520/** Opcode 0xd9 !11/5 */
7521FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7522{
7523 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7524 IEM_MC_BEGIN(1, 1);
7525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7526 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7529 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7530 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7531 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7532 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7533 IEM_MC_END();
7534 return VINF_SUCCESS;
7535}
7536
7537
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - stores the FPU environment to memory (no-wait form,
 * hence no IEM_MC_MAYBE_RAISE_FPU_XCPT here); layout depends on the
 * effective operand size.
 * NOTE(review): the IEMOP_MNEMONIC stats symbol/string say "fstenv" while
 * the function implements FNSTENV - confirm whether this is intentional. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7555
7556
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the FPU control word to memory (no-wait form,
 * hence no IEM_MC_MAYBE_RAISE_FPU_XCPT here). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7574
7575
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - performs no operation on the FPU stack, but still checks for
 * device-not-available/pending exceptions and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7593
7594
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - pushes a copy of ST(i) onto the FPU stack. An empty source
 * register causes a push underflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7622
7623
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST(0) and ST(i). The FSW C1 bit is set via the
 * FpuRes path; the empty-register case is delegated to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST(i) value goes to ST(0) (via FpuRes), old ST(0) to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7654
7655
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copies ST(0) to ST(i) and pops the stack. The ST(0)
 * destination case is special-cased as it is commonly used as 'ffreep st0'. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop if non-empty. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7702
7703
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7733
7734
/** Opcode 0xd9 0xe0. FCHS - change the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7749
7750
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * No data register is written; the status word produced by the assembly
 * worker is the sole output.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7779
7780
/** Opcode 0xd9 0xe4. FTST - compare ST(0) with 0.0, only FSW flags updated. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5. FXAM - examine/classify ST(0), only FSW flags updated. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7795
7796
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Requires register 7 (relative to the current top) to be empty; otherwise
 * a push overflow is raised.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7824
7825
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0. F2XM1 - ST(0) := 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7886
7887
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: STn is the first operand/destination, ST0 the
 * second; the pop happens via the _THEN_POP result store.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7919
7920
/** Opcode 0xd9 0xf1. FYL2X - ST(1) := ST(1) * log2(ST(0)), then pop.
 * Passes a fixed bRm of 1 so the worker treats ST(1) as STn. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7927
7928
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Uses the IEMFPURESULTTWO result type and the _TWO push/underflow macros
 * to handle both outputs atomically.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7958
7959
/** Opcode 0xd9 0xf2. FPTAN - replaces ST(0) with its partial tangent and
 * pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - ST(1) := partial arctangent, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - splits ST(0) into exponent (replacing ST(0))
 * and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST(0)/ST(1),
 * result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7990
7991
/** Opcode 0xd9 0xf6. FDECSTP - decrement the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 (see note above) */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8014
8015
/** Opcode 0xd9 0xf7. FINCSTP - increment the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 (see note above) */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8038
8039
/** Opcode 0xd9 0xf8. FPREM - partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1 - ST(1) := ST(1) * log2(ST(0)+1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT - square root of ST(0), result in ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS - replaces ST(0) with sine, pushes cosine. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT - round ST(0) to integer, result in ST(0). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE - scale ST(0) by ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN - sine of ST(0), result in ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS - cosine of ST(0), result in ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8102
8103
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 ModR/M bytes 0xe0 thru 0xff (register forms with
 * reg field 4..7); indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
8140
8141
/**
 * @opcode      0xd9
 *
 * Decoder for the 0xd9 x87 escape byte: fetches the ModR/M byte, records the
 * x87 opcode word (for FOP reporting) and dispatches on mod/reg.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The x87 opcode word is the low 3 bits of the escape byte + the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod=3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* ModR/M 0xe0..0xff go via the dispatch table above. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8186
8187
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /*
     * FCMOVB ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.CF is set.
     * Both registers must be non-empty; otherwise the stack-underflow path
     * is taken.  Only FOP/FPUIP are updated in the success path (no value
     * is stored unless the condition holds).
     */
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8214
8215
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.ZF is set. */
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8242
8243
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE ST(0),ST(i): copy ST(i) into ST(0) when CF or ZF is set. */
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8270
8271
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.PF is set ("unordered"). */
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8298
8299
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Note that the comparison is hardwired to ST(0) vs ST(1); only the FSW is
 * updated (no register value is stored), and both operands are popped on
 * either path.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8331
8332
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare of ST(0) with ST(1), popping both afterwards. */
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8339
8340
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit integer operand before touching FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8376
8377
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32int: ST(0) = ST(0) + m32i. */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32int: ST(0) = ST(0) * m32i. */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8392
8393
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32int: compare ST(0) with a 32-bit integer, FSW updated, no pop. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(0) empty: stack underflow, FPUDP/FPUDS recorded for the memory operand. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8426
8427
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32int: same as FICOM m32int but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8460
8461
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32int: ST(0) = ST(0) - m32i. */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32int: reversed subtraction, ST(0) = m32i - ST(0). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32int: ST(0) = ST(0) / m32i. */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32int: reversed division, ST(0) = m32i / ST(0). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8492
8493
/**
 * @opcode      0xda
 *
 * Decoder for the 0xda x87 escape byte: register forms are the FCMOVcc
 * family plus FUCOMPP; memory forms are the m32int arithmetic/compare ops.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 opcode word (low 3 bits of escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod=3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (m32int operand). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8535
8536
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32int: convert a 32-bit signed integer to r80 and push it. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (old ST(7), which becomes the new ST(0)) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8568
8569
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32int (SSE3): truncating store of ST(0) as a 32-bit integer, then pop. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if the invalid-op exception is masked, store the
           integer indefinite value (INT32_MIN) before raising underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8604
8605
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32int: store ST(0) as a 32-bit integer (rounded per FCW.RC), no pop. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: write integer indefinite if #IA is masked, then underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8640
8641
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32int: same as FIST m32int but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: write integer indefinite if #IA is masked, then underflow+pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8676
8677
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80real: load an 80-bit real from memory and push it onto the stack. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (old ST(7), which becomes the new ST(0)) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8709
8710
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80real: store ST(0) to memory as an 80-bit real, then pop. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: write negative QNaN if #IA is masked, then underflow+pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8745
8746
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.CF is clear. */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8773
8774
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.ZF is clear. */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8801
8802
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE ST(0),ST(i): copy ST(i) into ST(0) when both CF and ZF are clear. */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8829
8830
8831/** Opcode 0xdb 11/3. */
8832FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
8833{
8834 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
8835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8836
8837 IEM_MC_BEGIN(0, 1);
8838 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8839
8840 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8841 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8842
8843 IEM_MC_PREPARE_FPU_USAGE();
8844 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8845 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
8846 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8847 IEM_MC_ENDIF();
8848 IEM_MC_UPDATE_FPU_OPCODE_IP();
8849 IEM_MC_ELSE()
8850 IEM_MC_FPU_STACK_UNDERFLOW(0);
8851 IEM_MC_ENDIF();
8852 IEM_MC_ADVANCE_RIP();
8853
8854 IEM_MC_END();
8855 return VINF_SUCCESS;
8856}
8857
8858
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable; decoded but treated as a no-op here. */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable; decoded but treated as a no-op here. */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8883
8884
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FSW exception flags without checking for pending exceptions. */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8899
8900
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU; the no-wait form skips the pending-exception check. */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8908
8909
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* FNSETPM: 80287 protected-mode switch; treated as a no-op on later FPUs. */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* FRSTPM: 80287XL only; newer CPUs raise #UD, which is what we do. */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8938
8939
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI ST(0),ST(i): ordered compare setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8954
8955
/**
 * @opcode      0xdb
 *
 * Decoder for the 0xdb x87 escape byte: register forms are FCMOVcc
 * (negated conditions), the FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM group
 * and FUCOMI/FCOMI; memory forms are m32int loads/stores plus m80real.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 opcode word (low 3 bits of escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod=3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The control-op group; every bRm value 0xe0..0xe7 is covered. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9007
9008
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (register form); the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand order: STn is the first (destination) operand, ST0 the second. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9040
9041
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0): ST(i) = ST(i) + ST(0). */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0): ST(i) = ST(i) * ST(0). */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST(0) -- note the /4 vs /5 swap relative to the 0xd8 forms. */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST(0). */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST(0). */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST(0). */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9088
9089
9090/**
9091 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9092 * memory operand, and storing the result in ST0.
9093 *
9094 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9095 */
9096FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9097{
9098 IEM_MC_BEGIN(3, 3);
9099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9100 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9101 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9102 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9103 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9104 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9105
9106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9108 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9109 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9110
9111 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9112 IEM_MC_PREPARE_FPU_USAGE();
9113 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
9114 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9115 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9116 IEM_MC_ELSE()
9117 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9118 IEM_MC_ENDIF();
9119 IEM_MC_ADVANCE_RIP();
9120
9121 IEM_MC_END();
9122 return VINF_SUCCESS;
9123}
9124
9125
/** Opcode 0xdc !11/0.
 * FADD m64r - dispatches to the common ST0/m64r worker with the add helper. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9132
9133
/** Opcode 0xdc !11/1.
 * FMUL m64r - dispatches to the common ST0/m64r worker with the multiply helper. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9140
9141
/** Opcode 0xdc !11/2.
 * FCOM st0,m64r - compares ST0 with a 64-bit floating point memory operand,
 * updating only FSW (no result is stored, nothing is popped). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Empty ST0 takes the stack-underflow path instead of comparing. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9174
9175
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64r - same comparison as FCOM st0,m64r, but pops the register
 * stack afterwards (note the _THEN_POP variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9208
9209
/** Opcode 0xdc !11/4.
 * FSUB m64r - dispatches to the common ST0/m64r worker with the subtract helper. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9216
9217
/** Opcode 0xdc !11/5.
 * FSUBR m64r - dispatches to the common ST0/m64r worker with the
 * reverse-subtract helper. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9224
9225
/** Opcode 0xdc !11/6.
 * FDIV m64r - dispatches to the common ST0/m64r worker with the divide helper. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9232
9233
/** Opcode 0xdc !11/7.
 * FDIVR m64r - dispatches to the common ST0/m64r worker with the
 * reverse-divide helper. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9240
9241
9242/**
9243 * @opcode 0xdc
9244 */
9245FNIEMOP_DEF(iemOp_EscF4)
9246{
9247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9248 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9250 {
9251 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9252 {
9253 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9254 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9255 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9256 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9257 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9258 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9259 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9260 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9262 }
9263 }
9264 else
9265 {
9266 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9267 {
9268 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9269 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9270 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9271 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9272 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9273 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9274 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9275 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9277 }
9278 }
9279}
9280
9281
/** Opcode 0xdd !11/0.
 * FLD m64r - converts a 64-bit real memory operand to 80-bit and pushes it
 * onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is where the push will land; if it is
       occupied we take the push-overflow path instead of converting. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9313
9314
/** Opcode 0xdd !11/1. (Was mislabelled !11/0; the 0xdd decoder dispatches
 * this from reg field 1.)
 * FISTTP m64i - stores ST0 to a 64-bit integer with truncation and pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit only happens on the store paths. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9349
9350
/** Opcode 0xdd !11/2. (Was mislabelled !11/0; the 0xdd decoder dispatches
 * this from reg field 2.)
 * FST m64r - stores ST0 to a 64-bit real memory operand without popping. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if invalid-op is masked, store a negative QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9385
9386
9387
9388
/** Opcode 0xdd !11/3. (Was mislabelled !11/0; the 0xdd decoder dispatches
 * this from reg field 3.)
 * FSTP m64r - stores ST0 to a 64-bit real memory operand and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if invalid-op is masked, store a negative QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9423
9424
/** Opcode 0xdd !11/4. (Was mislabelled !11/0; the 0xdd decoder dispatches
 * this from reg field 4.)
 * FRSTOR - restores the full FPU state from memory; deferred to a C impl. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9442
9443
/** Opcode 0xdd !11/6. (Was mislabelled !11/0; the 0xdd decoder dispatches
 * this from reg field 6.)
 * FNSAVE - saves the full FPU state to memory; deferred to a C impl. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9462
/** Opcode 0xdd !11/7. (Was mislabelled !11/0; the 0xdd decoder dispatches
 * this from reg field 7.)
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9487
9488
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the given stack register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9510
9511
/** Opcode 0xdd 11/2. (Was mislabelled 11/1; the 0xdd decoder dispatches
 * this from reg field 2.)
 * FST ST(i) - copies ST0 into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9536
9537
/** Opcode 0xdd 11/4. (Was mislabelled 11/3; the 0xdd decoder dispatches
 * this from reg field 4.)
 * FUCOM ST0,ST(i) - unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9544
9545
/** Opcode 0xdd 11/5. (Was mislabelled 11/4; the 0xdd decoder dispatches
 * this from reg field 5.)
 * FUCOMP ST0,ST(i) - unordered compare, no store, pops once. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9552
9553
9554/**
9555 * @opcode 0xdd
9556 */
9557FNIEMOP_DEF(iemOp_EscF5)
9558{
9559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9560 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9562 {
9563 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9564 {
9565 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9566 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9567 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9568 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9569 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9570 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9571 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9572 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9573 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9574 }
9575 }
9576 else
9577 {
9578 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9579 {
9580 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9581 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9582 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9583 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9584 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9585 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9586 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9587 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9589 }
9590 }
9591}
9592
9593
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add then pop; dispatches to the common pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9600
9601
/** Opcode 0xde 11/1. (Was mislabelled 11/0; the 0xde decoder dispatches
 * this from reg field 1.)
 * FMULP ST(i),ST(0) - multiply then pop; dispatches to the common pop worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9608
9609
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST0 with ST1 and pops the stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9616
9617
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse-subtract then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9624
9625
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9632
9633
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse-divide then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9640
9641
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9648
9649
9650/**
9651 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9652 * the result in ST0.
9653 *
9654 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9655 */
9656FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9657{
9658 IEM_MC_BEGIN(3, 3);
9659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9660 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9661 IEM_MC_LOCAL(int16_t, i16Val2);
9662 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9663 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9664 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9665
9666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9668
9669 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9670 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9671 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9672
9673 IEM_MC_PREPARE_FPU_USAGE();
9674 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9675 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9676 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9677 IEM_MC_ELSE()
9678 IEM_MC_FPU_STACK_UNDERFLOW(0);
9679 IEM_MC_ENDIF();
9680 IEM_MC_ADVANCE_RIP();
9681
9682 IEM_MC_END();
9683 return VINF_SUCCESS;
9684}
9685
9686
/** Opcode 0xde !11/0.
 * FIADD m16i - dispatches to the common ST0/m16i worker with the
 * integer-add helper. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9693
9694
/** Opcode 0xde !11/1.
 * FIMUL m16i - dispatches to the common ST0/m16i worker with the
 * integer-multiply helper. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9701
9702
/** Opcode 0xde !11/2.
 * FICOM st0,m16i - compares ST0 with a 16-bit integer memory operand,
 * updating only FSW (no store, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9735
9736
/** Opcode 0xde !11/3.
 * FICOMP st0,m16i - same comparison as FICOM st0,m16i, but pops the register
 * stack afterwards (note the _THEN_POP variants below). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9769
9770
/** Opcode 0xde !11/4.
 * FISUB m16i - dispatches to the common ST0/m16i worker with the
 * integer-subtract helper. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9777
9778
/** Opcode 0xde !11/5.
 * FISUBR m16i - dispatches to the common ST0/m16i worker with the
 * integer reverse-subtract helper. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9785
9786
/** Opcode 0xde !11/6.
 * FIDIV m16i - dispatches to the common ST0/m16i worker with the
 * integer-divide helper. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9793
9794
/** Opcode 0xde !11/7.
 * FIDIVR m16i - dispatches to the common ST0/m16i worker with the
 * integer reverse-divide helper. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9801
9802
9803/**
9804 * @opcode 0xde
9805 */
9806FNIEMOP_DEF(iemOp_EscF6)
9807{
9808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9809 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9810 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9811 {
9812 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9813 {
9814 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9815 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9816 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9817 case 3: if (bRm == 0xd9)
9818 return FNIEMOP_CALL(iemOp_fcompp);
9819 return IEMOP_RAISE_INVALID_OPCODE();
9820 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9821 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9822 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9823 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9825 }
9826 }
9827 else
9828 {
9829 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9830 {
9831 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9832 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9833 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9834 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9835 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9836 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9837 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9838 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9840 }
9841 }
9842}
9843
9844
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP (frees the
 * register, then increments TOP, effectively popping). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9866
9867
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copies the FPU status word into AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9884
9885
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - unordered compare setting EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    /* NOTE(review): this passes iemAImpl_fcomi_r80_by_r80, the exact same
       assembly helper as FCOMIP below.  FUCOMIP differs from FCOMIP in its
       handling of unordered (QNaN) operands, so a fucomi-specific helper may
       be intended here -- confirm against the assembly implementations. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9892
9893
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare setting EFLAGS, then pop; deferred to
 * the common fcomi/fucomi C impl. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9900
9901
/** Opcode 0xdf !11/0.
 * FILD m16i - converts a 16-bit signed integer memory operand to 80-bit real
 * and pushes it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the push destination; must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9933
9934
/** Opcode 0xdf !11/1.
 * FISTTP m16i - stores ST0 to a 16-bit integer with truncation and pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9969
9970
/** Opcode 0xdf !11/2.
 * FIST m16i - stores ST0 to a 16-bit integer (rounded per FCW) without popping. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10005
10006
/** Opcode 0xdf !11/3.
 * FISTP m16i - stores ST0 to a 16-bit integer (rounded per FCW) and pops. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10041
10042
/** Opcode 0xdf !11/4. FBLD m80bcd - load packed BCD; not implemented yet. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10045
10046
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /* FILD m64i: convert the 64-bit signed integer memory operand to an 80-bit
       real and push it onto the FPU register stack. */
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push targets ST(7) (relative); it must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10078
10079
/** Opcode 0xdf !11/6. FBSTP m80bcd - store packed BCD and pop; not implemented yet. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
10082
10083
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* FISTP m64i: convert ST(0) to a 64-bit signed integer, store it at the
       memory operand and pop the FPU register stack. */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map writable up front; the FPU-store commit below is FSW-conditional. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the 64-bit "integer indefinite" (INT64_MIN)
           when the invalid-operation exception is masked (FCW.IM). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10118
10119
10120/**
10121 * @opcode 0xdf
10122 */
10123FNIEMOP_DEF(iemOp_EscF7)
10124{
10125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10127 {
10128 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10129 {
10130 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10131 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10132 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10133 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10134 case 4: if (bRm == 0xe0)
10135 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10136 return IEMOP_RAISE_INVALID_OPCODE();
10137 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10138 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10139 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10140 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10141 }
10142 }
10143 else
10144 {
10145 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10146 {
10147 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10148 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10149 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10150 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10151 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10152 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10153 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10154 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10156 }
10157 }
10158}
10159
10160
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement the count register (CX/ECX/RCX, selected by the
 * effective address size) and take the relative branch while the counter is
 * non-zero and ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10209
10210
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement the count register (CX/ECX/RCX, selected by the
 * effective address size) and take the relative branch while the counter is
 * non-zero and ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10259
10260
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement the count register (CX/ECX/RCX, selected by the effective
 * address size) and take the relative branch while the counter is non-zero.
 * A LOOP that targets itself (displacement == minus the instruction length)
 * would merely spin until the counter hits zero, so that case is shortcut by
 * zeroing the counter and falling through.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-targeting loop: skip the spinning, just clear CX. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-targeting loop: skip the spinning, just clear ECX. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-targeting loop: skip the spinning, just clear RCX. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10336
10337
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the relative branch when the count register
 * (width selected by the effective address size) is zero.  The counter is
 * not modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10383
10384
/** Opcode 0xe4.  IN AL,Ib - read one byte from the immediate I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10393
10394
/** Opcode 0xe5.  IN eAX,Ib - read 2 or 4 bytes (per operand size) from the
 *  immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10403
10404
/** Opcode 0xe6.  OUT Ib,AL - write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10413
10414
/** Opcode 0xe7.  OUT Ib,eAX - write AX/EAX (2 or 4 bytes per operand size) to
 *  the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10423
10424
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  The immediate is 16 or 32 bits wide per the
 * operand size; in 64-bit mode the 32-bit immediate is sign-extended to 64
 * bits.  The actual push/branch work is deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10455
10456
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  64-bit mode shares the 32-bit path since the
 * displacement is always a sign-extended imm32 there.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* same as 32-bit: imm32 displacement. */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10488
10489
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10508
10509
/**
 * @opcode 0xeb
 *
 * JMP Jb: short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10525
10526
/** Opcode 0xec.  IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10534
10535
/** Opcode 0xed.  IN eAX,DX - read 2 or 4 bytes (per operand size) from the
 *  I/O port in DX into AX/EAX.
 *  NOTE(review): the function name lacks the "in_" prefix used by its
 *  siblings (iemOp_in_AL_DX etc.); renaming would touch the opcode table, so
 *  it is only flagged here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10543
10544
/** Opcode 0xee.  OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10552
10553
/** Opcode 0xef.  OUT DX,eAX - write AX/EAX (2 or 4 bytes per operand size) to
 *  the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10561
10562
/**
 * @opcode 0xf0
 *
 * LOCK prefix: record the prefix in fPrefixes and recursively decode the next
 * opcode byte through the one-byte table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10574
10575
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the common software-interrupt C implementation
 * (fIsBpInstr=false so it is not treated as INT3).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
10586
10587
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: record it (clearing any earlier REPE), select the
 * 4-entry opcode table column and recursively decode the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10605
10606
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: record it (clearing any earlier REPNE), select the
 * 4-entry opcode table column and recursively decode the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10624
10625
/**
 * @opcode 0xf4
 *
 * HLT: halt the processor; deferred to the C implementation (which handles
 * the privilege check).  Note: no IEMOP_MNEMONIC here, unlike the siblings.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10634
10635
/**
 * @opcode 0xf5
 *
 * CMC: complement (toggle) the carry flag.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10649
10650
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Dispatches on the ModR/M mod field: register operands go straight through
 * the normal worker; memory operands are mapped read-write and may use the
 * locked worker when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10694
10695
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register forms are delegated to iemOpCommonUnaryGReg; the memory forms are
 * handled here per operand size, mapped read-write, with the locked worker
 * used when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (per-size normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10774
10775
/** Opcode 0xf6 /0.
 *  TEST Eb,Ib: AND the operands, update flags, discard the result.  The
 *  memory operand is therefore mapped read-only. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: the imm8 follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10822
10823
/** Opcode 0xf7 /0.
 *  TEST Ev,Iv: AND the operands, update flags, discard the result.  The
 *  memory operand is mapped read-only; in 64-bit mode the immediate is a
 *  sign-extended imm32. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: an imm16 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: an imm32 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the 64-bit form still uses a (sign-extended) imm32. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10963
10964
/** Opcode 0xf6 /4, /5, /6 and /7.
 *  Common worker for MUL/IMUL/DIV/IDIV Eb: the byte operand is combined with
 *  AL into AX via the given assembly worker; a non-zero worker return code
 *  raises \#DE (divide error). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the worker detected a divide error (divide by zero
           or quotient overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11016
11017
11018/** Opcode 0xf7 /4, /5, /6 and /7. */
11019FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11020{
11021 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11022
11023 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11024 {
11025 /* register access */
11026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11027 switch (pVCpu->iem.s.enmEffOpSize)
11028 {
11029 case IEMMODE_16BIT:
11030 {
11031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11032 IEM_MC_BEGIN(4, 1);
11033 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11034 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11035 IEM_MC_ARG(uint16_t, u16Value, 2);
11036 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11037 IEM_MC_LOCAL(int32_t, rc);
11038
11039 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11040 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11041 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11042 IEM_MC_REF_EFLAGS(pEFlags);
11043 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11044 IEM_MC_IF_LOCAL_IS_Z(rc) {
11045 IEM_MC_ADVANCE_RIP();
11046 } IEM_MC_ELSE() {
11047 IEM_MC_RAISE_DIVIDE_ERROR();
11048 } IEM_MC_ENDIF();
11049
11050 IEM_MC_END();
11051 return VINF_SUCCESS;
11052 }
11053
11054 case IEMMODE_32BIT:
11055 {
11056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11057 IEM_MC_BEGIN(4, 1);
11058 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11059 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11060 IEM_MC_ARG(uint32_t, u32Value, 2);
11061 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11062 IEM_MC_LOCAL(int32_t, rc);
11063
11064 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11065 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11066 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11067 IEM_MC_REF_EFLAGS(pEFlags);
11068 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11069 IEM_MC_IF_LOCAL_IS_Z(rc) {
11070 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11071 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11072 IEM_MC_ADVANCE_RIP();
11073 } IEM_MC_ELSE() {
11074 IEM_MC_RAISE_DIVIDE_ERROR();
11075 } IEM_MC_ENDIF();
11076
11077 IEM_MC_END();
11078 return VINF_SUCCESS;
11079 }
11080
11081 case IEMMODE_64BIT:
11082 {
11083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11084 IEM_MC_BEGIN(4, 1);
11085 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11086 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11087 IEM_MC_ARG(uint64_t, u64Value, 2);
11088 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11089 IEM_MC_LOCAL(int32_t, rc);
11090
11091 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11092 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11093 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11094 IEM_MC_REF_EFLAGS(pEFlags);
11095 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11096 IEM_MC_IF_LOCAL_IS_Z(rc) {
11097 IEM_MC_ADVANCE_RIP();
11098 } IEM_MC_ELSE() {
11099 IEM_MC_RAISE_DIVIDE_ERROR();
11100 } IEM_MC_ENDIF();
11101
11102 IEM_MC_END();
11103 return VINF_SUCCESS;
11104 }
11105
11106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11107 }
11108 }
11109 else
11110 {
11111 /* memory access. */
11112 switch (pVCpu->iem.s.enmEffOpSize)
11113 {
11114 case IEMMODE_16BIT:
11115 {
11116 IEM_MC_BEGIN(4, 2);
11117 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11118 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11119 IEM_MC_ARG(uint16_t, u16Value, 2);
11120 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11122 IEM_MC_LOCAL(int32_t, rc);
11123
11124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11126 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11127 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11128 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11129 IEM_MC_REF_EFLAGS(pEFlags);
11130 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11131 IEM_MC_IF_LOCAL_IS_Z(rc) {
11132 IEM_MC_ADVANCE_RIP();
11133 } IEM_MC_ELSE() {
11134 IEM_MC_RAISE_DIVIDE_ERROR();
11135 } IEM_MC_ENDIF();
11136
11137 IEM_MC_END();
11138 return VINF_SUCCESS;
11139 }
11140
11141 case IEMMODE_32BIT:
11142 {
11143 IEM_MC_BEGIN(4, 2);
11144 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11145 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11146 IEM_MC_ARG(uint32_t, u32Value, 2);
11147 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11149 IEM_MC_LOCAL(int32_t, rc);
11150
11151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11153 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11154 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11155 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11156 IEM_MC_REF_EFLAGS(pEFlags);
11157 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11158 IEM_MC_IF_LOCAL_IS_Z(rc) {
11159 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11160 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11161 IEM_MC_ADVANCE_RIP();
11162 } IEM_MC_ELSE() {
11163 IEM_MC_RAISE_DIVIDE_ERROR();
11164 } IEM_MC_ENDIF();
11165
11166 IEM_MC_END();
11167 return VINF_SUCCESS;
11168 }
11169
11170 case IEMMODE_64BIT:
11171 {
11172 IEM_MC_BEGIN(4, 2);
11173 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11174 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11175 IEM_MC_ARG(uint64_t, u64Value, 2);
11176 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11178 IEM_MC_LOCAL(int32_t, rc);
11179
11180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11182 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11183 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11184 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11185 IEM_MC_REF_EFLAGS(pEFlags);
11186 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11187 IEM_MC_IF_LOCAL_IS_Z(rc) {
11188 IEM_MC_ADVANCE_RIP();
11189 } IEM_MC_ELSE() {
11190 IEM_MC_RAISE_DIVIDE_ERROR();
11191 } IEM_MC_ENDIF();
11192
11193 IEM_MC_END();
11194 return VINF_SUCCESS;
11195 }
11196
11197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11198 }
11199 }
11200}
11201
11202/**
11203 * @opcode 0xf6
11204 */
11205FNIEMOP_DEF(iemOp_Grp3_Eb)
11206{
11207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11208 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11209 {
11210 case 0:
11211 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11212 case 1:
11213/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11214 return IEMOP_RAISE_INVALID_OPCODE();
11215 case 2:
11216 IEMOP_MNEMONIC(not_Eb, "not Eb");
11217 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11218 case 3:
11219 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11220 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11221 case 4:
11222 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11223 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11224 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
11225 case 5:
11226 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11227 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11228 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
11229 case 6:
11230 IEMOP_MNEMONIC(div_Eb, "div Eb");
11231 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11232 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
11233 case 7:
11234 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11235 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11236 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
11237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11238 }
11239}
11240
11241
11242/**
11243 * @opcode 0xf7
11244 */
11245FNIEMOP_DEF(iemOp_Grp3_Ev)
11246{
11247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11248 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11249 {
11250 case 0:
11251 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11252 case 1:
11253/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11254 return IEMOP_RAISE_INVALID_OPCODE();
11255 case 2:
11256 IEMOP_MNEMONIC(not_Ev, "not Ev");
11257 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11258 case 3:
11259 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11260 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11261 case 4:
11262 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11263 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11264 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11265 case 5:
11266 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11267 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11268 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11269 case 6:
11270 IEMOP_MNEMONIC(div_Ev, "div Ev");
11271 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11272 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11273 case 7:
11274 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11276 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11278 }
11279}
11280
11281
11282/**
11283 * @opcode 0xf8
11284 */
11285FNIEMOP_DEF(iemOp_clc)
11286{
11287 IEMOP_MNEMONIC(clc, "clc");
11288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11289 IEM_MC_BEGIN(0, 0);
11290 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11291 IEM_MC_ADVANCE_RIP();
11292 IEM_MC_END();
11293 return VINF_SUCCESS;
11294}
11295
11296
11297/**
11298 * @opcode 0xf9
11299 */
11300FNIEMOP_DEF(iemOp_stc)
11301{
11302 IEMOP_MNEMONIC(stc, "stc");
11303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11304 IEM_MC_BEGIN(0, 0);
11305 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11306 IEM_MC_ADVANCE_RIP();
11307 IEM_MC_END();
11308 return VINF_SUCCESS;
11309}
11310
11311
11312/**
11313 * @opcode 0xfa
11314 */
11315FNIEMOP_DEF(iemOp_cli)
11316{
11317 IEMOP_MNEMONIC(cli, "cli");
11318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11319 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11320}
11321
11322
/**
 * @opcode 0xfb
 *
 * STI - deferred to the C implementation (presumably because of the
 * IOPL/privilege and interrupt-shadow handling; see iemCImpl_sti).
 * NOTE(review): this handler was missing the doxygen \@opcode header its
 * siblings have; added for consistency.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11329
11330
11331/**
11332 * @opcode 0xfc
11333 */
11334FNIEMOP_DEF(iemOp_cld)
11335{
11336 IEMOP_MNEMONIC(cld, "cld");
11337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11338 IEM_MC_BEGIN(0, 0);
11339 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11340 IEM_MC_ADVANCE_RIP();
11341 IEM_MC_END();
11342 return VINF_SUCCESS;
11343}
11344
11345
11346/**
11347 * @opcode 0xfd
11348 */
11349FNIEMOP_DEF(iemOp_std)
11350{
11351 IEMOP_MNEMONIC(std, "std");
11352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11353 IEM_MC_BEGIN(0, 0);
11354 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11355 IEM_MC_ADVANCE_RIP();
11356 IEM_MC_END();
11357 return VINF_SUCCESS;
11358}
11359
11360
11361/**
11362 * @opcode 0xfe
11363 */
11364FNIEMOP_DEF(iemOp_Grp4)
11365{
11366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11367 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11368 {
11369 case 0:
11370 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11371 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11372 case 1:
11373 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11374 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11375 default:
11376 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11377 return IEMOP_RAISE_INVALID_OPCODE();
11378 }
11379}
11380
11381
11382/**
11383 * Opcode 0xff /2.
11384 * @param bRm The RM byte.
11385 */
11386FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11387{
11388 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11389 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11390
11391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11392 {
11393 /* The new RIP is taken from a register. */
11394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11395 switch (pVCpu->iem.s.enmEffOpSize)
11396 {
11397 case IEMMODE_16BIT:
11398 IEM_MC_BEGIN(1, 0);
11399 IEM_MC_ARG(uint16_t, u16Target, 0);
11400 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11401 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11402 IEM_MC_END()
11403 return VINF_SUCCESS;
11404
11405 case IEMMODE_32BIT:
11406 IEM_MC_BEGIN(1, 0);
11407 IEM_MC_ARG(uint32_t, u32Target, 0);
11408 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11409 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11410 IEM_MC_END()
11411 return VINF_SUCCESS;
11412
11413 case IEMMODE_64BIT:
11414 IEM_MC_BEGIN(1, 0);
11415 IEM_MC_ARG(uint64_t, u64Target, 0);
11416 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11417 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11418 IEM_MC_END()
11419 return VINF_SUCCESS;
11420
11421 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11422 }
11423 }
11424 else
11425 {
11426 /* The new RIP is taken from a register. */
11427 switch (pVCpu->iem.s.enmEffOpSize)
11428 {
11429 case IEMMODE_16BIT:
11430 IEM_MC_BEGIN(1, 1);
11431 IEM_MC_ARG(uint16_t, u16Target, 0);
11432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11435 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11436 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11437 IEM_MC_END()
11438 return VINF_SUCCESS;
11439
11440 case IEMMODE_32BIT:
11441 IEM_MC_BEGIN(1, 1);
11442 IEM_MC_ARG(uint32_t, u32Target, 0);
11443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11446 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11447 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11448 IEM_MC_END()
11449 return VINF_SUCCESS;
11450
11451 case IEMMODE_64BIT:
11452 IEM_MC_BEGIN(1, 1);
11453 IEM_MC_ARG(uint64_t, u64Target, 0);
11454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11457 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11458 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11459 IEM_MC_END()
11460 return VINF_SUCCESS;
11461
11462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11463 }
11464 }
11465}
11466
11467typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11468
11469FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11470{
11471 /* Registers? How?? */
11472 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11473 { /* likely */ }
11474 else
11475 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11476
11477 /* Far pointer loaded from memory. */
11478 switch (pVCpu->iem.s.enmEffOpSize)
11479 {
11480 case IEMMODE_16BIT:
11481 IEM_MC_BEGIN(3, 1);
11482 IEM_MC_ARG(uint16_t, u16Sel, 0);
11483 IEM_MC_ARG(uint16_t, offSeg, 1);
11484 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11488 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11489 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11490 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11491 IEM_MC_END();
11492 return VINF_SUCCESS;
11493
11494 case IEMMODE_64BIT:
11495 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11496 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11497 * and call far qword [rsp] encodings. */
11498 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11499 {
11500 IEM_MC_BEGIN(3, 1);
11501 IEM_MC_ARG(uint16_t, u16Sel, 0);
11502 IEM_MC_ARG(uint64_t, offSeg, 1);
11503 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11507 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11508 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11509 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11510 IEM_MC_END();
11511 return VINF_SUCCESS;
11512 }
11513 /* AMD falls thru. */
11514 /* fall thru */
11515
11516 case IEMMODE_32BIT:
11517 IEM_MC_BEGIN(3, 1);
11518 IEM_MC_ARG(uint16_t, u16Sel, 0);
11519 IEM_MC_ARG(uint32_t, offSeg, 1);
11520 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11524 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11525 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11526 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11527 IEM_MC_END();
11528 return VINF_SUCCESS;
11529
11530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11531 }
11532}
11533
11534
11535/**
11536 * Opcode 0xff /3.
11537 * @param bRm The RM byte.
11538 */
11539FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11540{
11541 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11542 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11543}
11544
11545
11546/**
11547 * Opcode 0xff /4.
11548 * @param bRm The RM byte.
11549 */
11550FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11551{
11552 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11553 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11554
11555 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11556 {
11557 /* The new RIP is taken from a register. */
11558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11559 switch (pVCpu->iem.s.enmEffOpSize)
11560 {
11561 case IEMMODE_16BIT:
11562 IEM_MC_BEGIN(0, 1);
11563 IEM_MC_LOCAL(uint16_t, u16Target);
11564 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11565 IEM_MC_SET_RIP_U16(u16Target);
11566 IEM_MC_END()
11567 return VINF_SUCCESS;
11568
11569 case IEMMODE_32BIT:
11570 IEM_MC_BEGIN(0, 1);
11571 IEM_MC_LOCAL(uint32_t, u32Target);
11572 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11573 IEM_MC_SET_RIP_U32(u32Target);
11574 IEM_MC_END()
11575 return VINF_SUCCESS;
11576
11577 case IEMMODE_64BIT:
11578 IEM_MC_BEGIN(0, 1);
11579 IEM_MC_LOCAL(uint64_t, u64Target);
11580 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11581 IEM_MC_SET_RIP_U64(u64Target);
11582 IEM_MC_END()
11583 return VINF_SUCCESS;
11584
11585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11586 }
11587 }
11588 else
11589 {
11590 /* The new RIP is taken from a memory location. */
11591 switch (pVCpu->iem.s.enmEffOpSize)
11592 {
11593 case IEMMODE_16BIT:
11594 IEM_MC_BEGIN(0, 2);
11595 IEM_MC_LOCAL(uint16_t, u16Target);
11596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11599 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11600 IEM_MC_SET_RIP_U16(u16Target);
11601 IEM_MC_END()
11602 return VINF_SUCCESS;
11603
11604 case IEMMODE_32BIT:
11605 IEM_MC_BEGIN(0, 2);
11606 IEM_MC_LOCAL(uint32_t, u32Target);
11607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11610 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11611 IEM_MC_SET_RIP_U32(u32Target);
11612 IEM_MC_END()
11613 return VINF_SUCCESS;
11614
11615 case IEMMODE_64BIT:
11616 IEM_MC_BEGIN(0, 2);
11617 IEM_MC_LOCAL(uint64_t, u64Target);
11618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11621 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11622 IEM_MC_SET_RIP_U64(u64Target);
11623 IEM_MC_END()
11624 return VINF_SUCCESS;
11625
11626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11627 }
11628 }
11629}
11630
11631
11632/**
11633 * Opcode 0xff /5.
11634 * @param bRm The RM byte.
11635 */
11636FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11637{
11638 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11639 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11640}
11641
11642
11643/**
11644 * Opcode 0xff /6.
11645 * @param bRm The RM byte.
11646 */
11647FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11648{
11649 IEMOP_MNEMONIC(push_Ev, "push Ev");
11650
11651 /* Registers are handled by a common worker. */
11652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11653 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11654
11655 /* Memory we do here. */
11656 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11657 switch (pVCpu->iem.s.enmEffOpSize)
11658 {
11659 case IEMMODE_16BIT:
11660 IEM_MC_BEGIN(0, 2);
11661 IEM_MC_LOCAL(uint16_t, u16Src);
11662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11665 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11666 IEM_MC_PUSH_U16(u16Src);
11667 IEM_MC_ADVANCE_RIP();
11668 IEM_MC_END();
11669 return VINF_SUCCESS;
11670
11671 case IEMMODE_32BIT:
11672 IEM_MC_BEGIN(0, 2);
11673 IEM_MC_LOCAL(uint32_t, u32Src);
11674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11677 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11678 IEM_MC_PUSH_U32(u32Src);
11679 IEM_MC_ADVANCE_RIP();
11680 IEM_MC_END();
11681 return VINF_SUCCESS;
11682
11683 case IEMMODE_64BIT:
11684 IEM_MC_BEGIN(0, 2);
11685 IEM_MC_LOCAL(uint64_t, u64Src);
11686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11689 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11690 IEM_MC_PUSH_U64(u64Src);
11691 IEM_MC_ADVANCE_RIP();
11692 IEM_MC_END();
11693 return VINF_SUCCESS;
11694
11695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11696 }
11697}
11698
11699
11700/**
11701 * @opcode 0xff
11702 */
11703FNIEMOP_DEF(iemOp_Grp5)
11704{
11705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11706 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11707 {
11708 case 0:
11709 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11710 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11711 case 1:
11712 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11713 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11714 case 2:
11715 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11716 case 3:
11717 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11718 case 4:
11719 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11720 case 5:
11721 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11722 case 6:
11723 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11724 case 7:
11725 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11726 return IEMOP_RAISE_INVALID_OPCODE();
11727 }
11728 AssertFailedReturn(VERR_IEM_IPE_3);
11729}
11730
11731
11732
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed by the opcode byte; each entry is the decoder/worker function for
 * that opcode.  Group entries (Grp1..Grp5, Grp11, escapes) dispatch further
 * on the ModR/M reg field or on subsequent opcode bytes.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11800
11801
11802/** @} */
11803
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette