VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@72518

Last change on this file since 72518 was 72518, checked in by vboxsync, 7 years ago

IEM: Implemented rsm for the purpose of SVM intercepting. Renamed IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP to IEMCIMPL_HLP_SVM_INSTR_INTERCEPT_AND_NRIP.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision

File size: 332.1 KB

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 72518 2018-06-11 15:00:30Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for Group 6 opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
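
/* Illustrative sketch, not part of the upstream file: how the Group 6
   dispatcher above carves up a ModRM byte. The byte is laid out as
   mod(7:6) reg(5:3) rm(2:0), and the reg field indexes g_apfnGroup6.
   Compiled out; plain stdint types stand in for the X86_MODRM_* macros,
   and the sample byte 0xd8 is an assumption for the example. */
#if 0
# include <stdint.h>
# include <stdio.h>
static void iemExampleSplitModRm(void)
{
    uint8_t const bRm  = 0xd8;           /* 11 011 000 in binary                     */
    uint8_t const iMod = bRm >> 6;       /* 3 -> register operand form               */
    uint8_t const iReg = (bRm >> 3) & 7; /* 3 -> g_apfnGroup6[3] == iemOp_Grp6_ltr   */
    uint8_t const iRm  = bRm & 7;        /* 0 -> rAX (or r8 when REX.B is set)       */
    printf("mod=%u reg=%u rm=%u\n", (unsigned)iMod, (unsigned)iReg, (unsigned)iRm);
}
#endif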


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here because we (GIM)
       generally want all hypercalls regardless of the instruction used; if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here because we (GIM)
       generally want all hypercalls regardless of the instruction used; if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
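
/* Illustrative sketch, not part of the upstream file: how a concrete 0f 01
   encoding travels through the dispatcher above. A memory form routes through
   g_apfnGroup7Mem by its reg field; a register form (mod == 3) is picked apart
   by the nested switches. Compiled out; the flat index returned for the
   register case is only for illustration. */
#if 0
# include <stdint.h>
static unsigned iemExampleGrp7Route(uint8_t bRm)
{
    if ((bRm >> 6) != 3)
        return (bRm >> 3) & 7;  /* e.g. 0f 01 16: reg=2 -> g_apfnGroup7Mem[2], lgdt */
    /* e.g. bRm == 0xd9: reg=3, rm=1 -> iemOp_Grp7_Amd_vmmcall in the switch above */
    return ((bRm >> 3) & 7) * 8 + (bRm & 7);
}
#endif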
/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
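
/* Illustrative sketch, not part of the upstream file: what the recurring
   "(bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB" pattern above computes.
   IEM appears to keep the REX.B/REX.R extensions pre-shifted (0 or 8), so a
   plain OR widens the 3-bit ModRM field to a 4-bit register index; that
   storage detail is an assumption here. Compiled out. */
#if 0
# include <stdint.h>
static uint8_t iemExampleExtendRm(uint8_t bRm, uint8_t uRexB /* assumed 0 or 8 */)
{
    return (uint8_t)((bRm & 7) | uRexB); /* e.g. rm=2 with REX.B set -> xmm10 */
}
#endif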


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
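
/* Illustrative sketch, not part of the upstream file: the asymmetry the movss
   decoder above encodes. Register-to-register movss writes only the low dword
   of the destination, while a load from memory zero-extends through the whole
   128-bit register (the VssZx operand form, IEM_MC_STORE_XREG_U32_ZX_U128).
   Compiled out; the struct is a simplified stand-in for an XMM register. */
#if 0
# include <stdint.h>
typedef struct { uint32_t au32[4]; } EXAMPLEXMMREG;
static void iemExampleMovssRegReg(EXAMPLEXMMREG *pDst, EXAMPLEXMMREG const *pSrc)
{
    pDst->au32[0] = pSrc->au32[0];          /* high dwords keep their values */
}
static void iemExampleMovssRegMem(EXAMPLEXMMREG *pDst, uint32_t const *puSrc)
{
    pDst->au32[0] = *puSrc;                 /* low dword loads from memory   */
    pDst->au32[1] = pDst->au32[2] = pDst->au32[3] = 0; /* rest is zeroed     */
}
#endif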


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
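
/* Illustrative sketch, not part of the upstream file: the movddup semantics
   the decoder above feeds to iemAImpl_movddup. The low qword of the source is
   duplicated into both halves of the destination, matching the @optest vector.
   Compiled out; the struct is a simplified stand-in for an XMM register. */
#if 0
# include <stdint.h>
typedef struct { uint64_t au64[2]; } EXAMPLEXMMREGQ;
static void iemExampleMovddup(EXAMPLEXMMREGQ *pDst, uint64_t uSrcLo)
{
    pDst->au64[0] = uSrcLo;
    pDst->au64[1] = uSrcLo;
}
#endif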


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1886
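/*
 * Illustrative sketch, not part of the emulation: movshdup duplicates the two
 * odd-numbered (high) dwords of the source into each dword pair, which is what
 * the @optest values above encode.  Scalar model with an invented name:
 *
 *     #include <stdint.h>
 *
 *     static void MovShDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
 *     {
 *         au32Dst[0] = au32Src[1]; // low pair  <- dword 1
 *         au32Dst[1] = au32Src[1];
 *         au32Dst[2] = au32Src[3]; // high pair <- dword 3
 *         au32Dst[3] = au32Src[3];
 *     }
 */
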
1887/**
1888 * @opdone
1889 * @opmnemonic udf30f16
1890 * @opcode 0x16
1891 * @oppfx 0xf2
1892 * @opunused intel-modrm
1893 * @opcpuid sse
1894 * @optest ->
1895 * @opdone
1896 */
1897
1898
1899/**
1900 * @opcode 0x17
1901 * @opcodesub !11 mr/reg
1902 * @oppfx none
1903 * @opcpuid sse
1904 * @opgroup og_sse_simdfp_datamove
1905 * @opxcpttype 5
1906 * @optest op1=1 op2=2 -> op1=2
1907 * @optest op1=0 op2=-42 -> op1=-42
1908 */
1909FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1910{
1911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1912 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1913 {
1914 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1915
1916 IEM_MC_BEGIN(0, 2);
1917 IEM_MC_LOCAL(uint64_t, uSrc);
1918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1919
1920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1922 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1923 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1924
1925 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1926 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1927
1928 IEM_MC_ADVANCE_RIP();
1929 IEM_MC_END();
1930 return VINF_SUCCESS;
1931 }
1932
1933 /**
1934 * @opdone
1935 * @opmnemonic ud0f17m3
1936 * @opcode 0x17
1937 * @opcodesub 11 mr/reg
1938 * @oppfx none
1939 * @opunused immediate
1940 * @opcpuid sse
1941 * @optest ->
1942 */
1943 return IEMOP_RAISE_INVALID_OPCODE();
1944}
1945
1946
1947/**
1948 * @opcode 0x17
1949 * @opcodesub !11 mr/reg
1950 * @oppfx 0x66
1951 * @opcpuid sse2
1952 * @opgroup og_sse2_pcksclr_datamove
1953 * @opxcpttype 5
1954 * @optest op1=1 op2=2 -> op1=2
1955 * @optest op1=0 op2=-42 -> op1=-42
1956 */
1957FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1958{
1959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1960 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1961 {
1962 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1963
1964 IEM_MC_BEGIN(0, 2);
1965 IEM_MC_LOCAL(uint64_t, uSrc);
1966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1971 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1972
1973 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1974 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1975
1976 IEM_MC_ADVANCE_RIP();
1977 IEM_MC_END();
1978 return VINF_SUCCESS;
1979 }
1980
1981 /**
1982 * @opdone
1983 * @opmnemonic ud660f17m3
1984 * @opcode 0x17
1985 * @opcodesub 11 mr/reg
1986 * @oppfx 0x66
1987 * @opunused immediate
1988 * @opcpuid sse
1989 * @optest ->
1990 */
1991 return IEMOP_RAISE_INVALID_OPCODE();
1992}
1993
1994
1995/**
1996 * @opdone
1997 * @opmnemonic udf30f17
1998 * @opcode 0x17
1999 * @oppfx 0xf3
2000 * @opunused intel-modrm
2001 * @opcpuid sse
2002 * @optest ->
2003 * @opdone
2004 */
2005
2006/**
2007 * @opmnemonic udf20f17
2008 * @opcode 0x17
2009 * @oppfx 0xf2
2010 * @opunused intel-modrm
2011 * @opcpuid sse
2012 * @optest ->
2013 * @opdone
2014 */
2015
2016
2017/** Opcode 0x0f 0x18. */
2018FNIEMOP_DEF(iemOp_prefetch_Grp16)
2019{
2020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2021 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2022 {
2023 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2024 {
2025 case 4: /* Aliased to /0 for the time being according to AMD. */
2026 case 5: /* Aliased to /0 for the time being according to AMD. */
2027 case 6: /* Aliased to /0 for the time being according to AMD. */
2028 case 7: /* Aliased to /0 for the time being according to AMD. */
2029 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2030 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2031 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2032 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2034 }
2035
2036 IEM_MC_BEGIN(0, 1);
2037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2040 /* Currently a NOP. */
2041 NOREF(GCPtrEffSrc);
2042 IEM_MC_ADVANCE_RIP();
2043 IEM_MC_END();
2044 return VINF_SUCCESS;
2045 }
2046
2047 return IEMOP_RAISE_INVALID_OPCODE();
2048}
2049
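/*
 * Illustrative sketch, not part of the decoder: how the ModRM byte consumed by
 * the group-16 handler above splits into mod, reg and rm.  mod == 3 selects a
 * register operand (invalid for prefetch), reg picks the prefetch hint, and rm
 * feeds the effective-address calculation.  The constants mirror the
 * X86_MODRM_* masks; the function name is invented for the example.
 *
 *     #include <stdint.h>
 *
 *     static void SplitModRm(uint8_t bRm, uint8_t *piMod, uint8_t *piReg, uint8_t *piRm)
 *     {
 *         *piMod = bRm >> 6;        // 0..2 = memory forms, 3 = register form
 *         *piReg = (bRm >> 3) & 7;  // opcode extension: 0=nta, 1=t0, 2=t1, 3=t2
 *         *piRm  = bRm & 7;         // addressing form / base register
 *     }
 */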
2050
2051/** Opcode 0x0f 0x19..0x1f. */
2052FNIEMOP_DEF(iemOp_nop_Ev)
2053{
2054 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2059 IEM_MC_BEGIN(0, 0);
2060 IEM_MC_ADVANCE_RIP();
2061 IEM_MC_END();
2062 }
2063 else
2064 {
2065 IEM_MC_BEGIN(0, 1);
2066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2069 /* Currently a NOP. */
2070 NOREF(GCPtrEffSrc);
2071 IEM_MC_ADVANCE_RIP();
2072 IEM_MC_END();
2073 }
2074 return VINF_SUCCESS;
2075}
2076
2077
2078/** Opcode 0x0f 0x20. */
2079FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2080{
2081 /* mod is ignored, as are operand-size overrides. */
2082 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2083 IEMOP_HLP_MIN_386();
2084 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2085 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2086 else
2087 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2088
2089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2090 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2092 {
2093 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2094 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2095 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2096 iCrReg |= 8;
2097 }
2098 switch (iCrReg)
2099 {
2100 case 0: case 2: case 3: case 4: case 8:
2101 break;
2102 default:
2103 return IEMOP_RAISE_INVALID_OPCODE();
2104 }
2105 IEMOP_HLP_DONE_DECODING();
2106
2107 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2108}
2109
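/*
 * Illustrative sketch, not part of the decoder: how the CRn index is assembled
 * from the ModRM reg field, REX.R, and the AMD LOCK-prefix alias for CR8 that
 * the function above implements.  Invented helper name; -1 stands in for #UD.
 *
 *     #include <stdbool.h>
 *     #include <stdint.h>
 *
 *     static int CalcCrRegSketch(uint8_t bRm, bool fRexR, bool fLock, bool fMovCr8In32Bit)
 *     {
 *         int iCrReg = ((bRm >> 3) & 7) | (fRexR ? 8 : 0);
 *         if (fLock)
 *         {
 *             if (!fMovCr8In32Bit)
 *                 return -1;      // #UD: CPU lacks the LOCK alias for CR8
 *             iCrReg |= 8;        // lock mov reg,cr0 is mov reg,cr8
 *         }
 *         switch (iCrReg)
 *         {
 *             case 0: case 2: case 3: case 4: case 8:
 *                 return iCrReg;  // implemented control registers
 *             default:
 *                 return -1;      // #UD
 *         }
 *     }
 */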
2110
2111/** Opcode 0x0f 0x21. */
2112FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2113{
2114 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2115 IEMOP_HLP_MIN_386();
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2118 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2119 return IEMOP_RAISE_INVALID_OPCODE();
2120 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2121 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2122 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2123}
2124
2125
2126/** Opcode 0x0f 0x22. */
2127FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2128{
2129 /* mod is ignored, as are operand-size overrides. */
2130 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2131 IEMOP_HLP_MIN_386();
2132 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2133 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2134 else
2135 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2136
2137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2138 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2139 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2140 {
2141 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2142 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2143 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2144 iCrReg |= 8;
2145 }
2146 switch (iCrReg)
2147 {
2148 case 0: case 2: case 3: case 4: case 8:
2149 break;
2150 default:
2151 return IEMOP_RAISE_INVALID_OPCODE();
2152 }
2153 IEMOP_HLP_DONE_DECODING();
2154
2155 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2156}
2157
2158
2159/** Opcode 0x0f 0x23. */
2160FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2161{
2162 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2163 IEMOP_HLP_MIN_386();
2164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2166 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2167 return IEMOP_RAISE_INVALID_OPCODE();
2168 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2169 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2170 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2171}
2172
2173
2174/** Opcode 0x0f 0x24. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Td)
2176{
2177 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2178 /** @todo works on 386 and 486. */
2179 /* The RM byte is not considered, see testcase. */
2180 return IEMOP_RAISE_INVALID_OPCODE();
2181}
2182
2183
2184/** Opcode 0x0f 0x26. */
2185FNIEMOP_DEF(iemOp_mov_Td_Rd)
2186{
2187 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2188 /** @todo works on 386 and 486. */
2189 /* The RM byte is not considered, see testcase. */
2190 return IEMOP_RAISE_INVALID_OPCODE();
2191}
2192
2193
2194/**
2195 * @opcode 0x28
2196 * @oppfx none
2197 * @opcpuid sse
2198 * @opgroup og_sse_simdfp_datamove
2199 * @opxcpttype 1
2200 * @optest op1=1 op2=2 -> op1=2
2201 * @optest op1=0 op2=-42 -> op1=-42
2202 */
2203FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2204{
2205 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2208 {
2209 /*
2210 * Register, register.
2211 */
2212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2213 IEM_MC_BEGIN(0, 0);
2214 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2215 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2216 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2217 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2218 IEM_MC_ADVANCE_RIP();
2219 IEM_MC_END();
2220 }
2221 else
2222 {
2223 /*
2224 * Register, memory.
2225 */
2226 IEM_MC_BEGIN(0, 2);
2227 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2229
2230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2233 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2234
2235 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2236 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2237
2238 IEM_MC_ADVANCE_RIP();
2239 IEM_MC_END();
2240 }
2241 return VINF_SUCCESS;
2242}
2243
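/*
 * Illustrative sketch, not part of the emulation: the _ALIGN_SSE fetch used by
 * movaps (and movapd below) requires a 16-byte aligned effective address and
 * raises #GP(0) otherwise, roughly:
 *
 *     #include <stdbool.h>
 *     #include <stdint.h>
 *
 *     static bool IsSseAccessAligned(uint64_t GCPtrEff)
 *     {
 *         return (GCPtrEff & 15) == 0; // misaligned -> #GP(0)
 *     }
 */
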
2244/**
2245 * @opcode 0x28
2246 * @oppfx 66
2247 * @opcpuid sse2
2248 * @opgroup og_sse2_pcksclr_datamove
2249 * @opxcpttype 1
2250 * @optest op1=1 op2=2 -> op1=2
2251 * @optest op1=0 op2=-42 -> op1=-42
2252 */
2253FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2254{
2255 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2258 {
2259 /*
2260 * Register, register.
2261 */
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2263 IEM_MC_BEGIN(0, 0);
2264 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2265 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2266 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2267 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 else
2272 {
2273 /*
2274 * Register, memory.
2275 */
2276 IEM_MC_BEGIN(0, 2);
2277 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2279
2280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2284
2285 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2286 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2287
2288 IEM_MC_ADVANCE_RIP();
2289 IEM_MC_END();
2290 }
2291 return VINF_SUCCESS;
2292}
2293
2294/* Opcode 0xf3 0x0f 0x28 - invalid */
2295/* Opcode 0xf2 0x0f 0x28 - invalid */
2296
2297/**
2298 * @opcode 0x29
2299 * @oppfx none
2300 * @opcpuid sse
2301 * @opgroup og_sse_simdfp_datamove
2302 * @opxcpttype 1
2303 * @optest op1=1 op2=2 -> op1=2
2304 * @optest op1=0 op2=-42 -> op1=-42
2305 */
2306FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2307{
2308 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2311 {
2312 /*
2313 * Register, register.
2314 */
2315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2316 IEM_MC_BEGIN(0, 0);
2317 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2318 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2319 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2320 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2321 IEM_MC_ADVANCE_RIP();
2322 IEM_MC_END();
2323 }
2324 else
2325 {
2326 /*
2327 * Memory, register.
2328 */
2329 IEM_MC_BEGIN(0, 2);
2330 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2332
2333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2335 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2337
2338 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2339 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2340
2341 IEM_MC_ADVANCE_RIP();
2342 IEM_MC_END();
2343 }
2344 return VINF_SUCCESS;
2345}
2346
2347/**
2348 * @opcode 0x29
2349 * @oppfx 66
2350 * @opcpuid sse2
2351 * @opgroup og_sse2_pcksclr_datamove
2352 * @opxcpttype 1
2353 * @optest op1=1 op2=2 -> op1=2
2354 * @optest op1=0 op2=-42 -> op1=-42
2355 */
2356FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2357{
2358 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2361 {
2362 /*
2363 * Register, register.
2364 */
2365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2366 IEM_MC_BEGIN(0, 0);
2367 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2368 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2369 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2370 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2371 IEM_MC_ADVANCE_RIP();
2372 IEM_MC_END();
2373 }
2374 else
2375 {
2376 /*
2377 * Memory, register.
2378 */
2379 IEM_MC_BEGIN(0, 2);
2380 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2387
2388 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2389 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2390
2391 IEM_MC_ADVANCE_RIP();
2392 IEM_MC_END();
2393 }
2394 return VINF_SUCCESS;
2395}
2396
2397/* Opcode 0xf3 0x0f 0x29 - invalid */
2398/* Opcode 0xf2 0x0f 0x29 - invalid */
2399
2400
2401/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2402FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2403/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2404FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2405/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2406FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2407/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2408FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2409
2410
2411/**
2412 * @opcode 0x2b
2413 * @opcodesub !11 mr/reg
2414 * @oppfx none
2415 * @opcpuid sse
2416 * @opgroup og_sse1_cachect
2417 * @opxcpttype 1
2418 * @optest op1=1 op2=2 -> op1=2
2419 * @optest op1=0 op2=-42 -> op1=-42
2420 */
2421FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2422{
2423 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2425 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2426 {
2427 /*
2428 * memory, register.
2429 */
2430 IEM_MC_BEGIN(0, 2);
2431 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2433
2434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2436 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2437 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2438
2439 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2440 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2441
2442 IEM_MC_ADVANCE_RIP();
2443 IEM_MC_END();
2444 }
2445 /* The register, register encoding is invalid. */
2446 else
2447 return IEMOP_RAISE_INVALID_OPCODE();
2448 return VINF_SUCCESS;
2449}
2450
2451/**
2452 * @opcode 0x2b
2453 * @opcodesub !11 mr/reg
2454 * @oppfx 0x66
2455 * @opcpuid sse2
2456 * @opgroup og_sse2_cachect
2457 * @opxcpttype 1
2458 * @optest op1=1 op2=2 -> op1=2
2459 * @optest op1=0 op2=-42 -> op1=-42
2460 */
2461FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2462{
2463 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2465 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2466 {
2467 /*
2468 * memory, register.
2469 */
2470 IEM_MC_BEGIN(0, 2);
2471 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2473
2474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2478
2479 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2480 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2481
2482 IEM_MC_ADVANCE_RIP();
2483 IEM_MC_END();
2484 }
2485 /* The register, register encoding is invalid. */
2486 else
2487 return IEMOP_RAISE_INVALID_OPCODE();
2488 return VINF_SUCCESS;
2489}
2490/* Opcode 0xf3 0x0f 0x2b - invalid */
2491/* Opcode 0xf2 0x0f 0x2b - invalid */
2492
2493
2494/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2495FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2496/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2497FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2498/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2499FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2500/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2501FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2502
2503/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2504FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2505/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2506FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2507/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2508FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2509/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2510FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2511
2512/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2513FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2514/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2515FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2516/* Opcode 0xf3 0x0f 0x2e - invalid */
2517/* Opcode 0xf2 0x0f 0x2e - invalid */
2518
2519/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2520FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2521/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2522FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2523/* Opcode 0xf3 0x0f 0x2f - invalid */
2524/* Opcode 0xf2 0x0f 0x2f - invalid */
2525
2526/** Opcode 0x0f 0x30. */
2527FNIEMOP_DEF(iemOp_wrmsr)
2528{
2529 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2531 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2532}
2533
2534
2535/** Opcode 0x0f 0x31. */
2536FNIEMOP_DEF(iemOp_rdtsc)
2537{
2538 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2540 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2541}
2542
2543
2544/** Opcode 0x0f 0x32. */
2545FNIEMOP_DEF(iemOp_rdmsr)
2546{
2547 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2549 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2550}
2551
2552
2553/** Opcode 0x0f 0x33. */
2554FNIEMOP_DEF(iemOp_rdpmc)
2555{
2556 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2558 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2559}
2560
2561
2562/** Opcode 0x0f 0x34. */
2563FNIEMOP_STUB(iemOp_sysenter);
2564/** Opcode 0x0f 0x35. */
2565FNIEMOP_STUB(iemOp_sysexit);
2566/** Opcode 0x0f 0x37. */
2567FNIEMOP_STUB(iemOp_getsec);
2568
2569
2570/** Opcode 0x0f 0x38. */
2571FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2572{
2573#ifdef IEM_WITH_THREE_0F_38
2574 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2575 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2576#else
2577 IEMOP_BITCH_ABOUT_STUB();
2578 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2579#endif
2580}
2581
2582
2583/** Opcode 0x0f 0x3a. */
2584FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2585{
2586#ifdef IEM_WITH_THREE_0F_3A
2587 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2588 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2589#else
2590 IEMOP_BITCH_ABOUT_STUB();
2591 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2592#endif
2593}
2594
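/*
 * Illustrative sketch, not part of the decoder: the three-byte tables indexed
 * above store four handlers per opcode byte, one per mandatory-prefix column,
 * so dispatch is a single multiply-add.  The column order (none, 0x66, 0xf3,
 * 0xf2) and all names below are assumptions made for the example.
 *
 *     #include <stddef.h>
 *     #include <stdint.h>
 *
 *     typedef int (*PFNSKETCHOP)(void);
 *
 *     static int SketchInvalidOp(void) { return -1; }
 *
 *     // 256 opcodes x 4 prefix columns.
 *     static PFNSKETCHOP g_apfnSketch[256 * 4];
 *
 *     static int SketchDispatch(uint8_t bOpcode, unsigned idxPrefix)
 *     {
 *         PFNSKETCHOP pfn = g_apfnSketch[(size_t)bOpcode * 4 + idxPrefix];
 *         return pfn ? pfn() : SketchInvalidOp();
 *     }
 */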
2595
2596/**
2597 * Implements a conditional move.
2598 *
2599 * Wish there was an obvious way to do this where we could share and reduce
2600 * code bloat.
2601 *
2602 * @param a_Cnd The conditional "microcode" operation.
2603 */
2604#define CMOV_X(a_Cnd) \
2605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2606 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2607 { \
2608 switch (pVCpu->iem.s.enmEffOpSize) \
2609 { \
2610 case IEMMODE_16BIT: \
2611 IEM_MC_BEGIN(0, 1); \
2612 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2613 a_Cnd { \
2614 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2615 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2616 } IEM_MC_ENDIF(); \
2617 IEM_MC_ADVANCE_RIP(); \
2618 IEM_MC_END(); \
2619 return VINF_SUCCESS; \
2620 \
2621 case IEMMODE_32BIT: \
2622 IEM_MC_BEGIN(0, 1); \
2623 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2624 a_Cnd { \
2625 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2626 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2627 } IEM_MC_ELSE() { \
2628 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2629 } IEM_MC_ENDIF(); \
2630 IEM_MC_ADVANCE_RIP(); \
2631 IEM_MC_END(); \
2632 return VINF_SUCCESS; \
2633 \
2634 case IEMMODE_64BIT: \
2635 IEM_MC_BEGIN(0, 1); \
2636 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2637 a_Cnd { \
2638 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2639 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2640 } IEM_MC_ENDIF(); \
2641 IEM_MC_ADVANCE_RIP(); \
2642 IEM_MC_END(); \
2643 return VINF_SUCCESS; \
2644 \
2645 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2646 } \
2647 } \
2648 else \
2649 { \
2650 switch (pVCpu->iem.s.enmEffOpSize) \
2651 { \
2652 case IEMMODE_16BIT: \
2653 IEM_MC_BEGIN(0, 2); \
2654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2655 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2657 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2658 a_Cnd { \
2659 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2660 } IEM_MC_ENDIF(); \
2661 IEM_MC_ADVANCE_RIP(); \
2662 IEM_MC_END(); \
2663 return VINF_SUCCESS; \
2664 \
2665 case IEMMODE_32BIT: \
2666 IEM_MC_BEGIN(0, 2); \
2667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2668 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2670 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2671 a_Cnd { \
2672 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2673 } IEM_MC_ELSE() { \
2674 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2675 } IEM_MC_ENDIF(); \
2676 IEM_MC_ADVANCE_RIP(); \
2677 IEM_MC_END(); \
2678 return VINF_SUCCESS; \
2679 \
2680 case IEMMODE_64BIT: \
2681 IEM_MC_BEGIN(0, 2); \
2682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2683 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2685 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2686 a_Cnd { \
2687 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2688 } IEM_MC_ENDIF(); \
2689 IEM_MC_ADVANCE_RIP(); \
2690 IEM_MC_END(); \
2691 return VINF_SUCCESS; \
2692 \
2693 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2694 } \
2695 } do {} while (0)
2696
2697
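/*
 * Illustrative sketch, not part of the emulation: the easily missed detail in
 * the CMOV_X template above is the 32-bit case, where the high half of the
 * 64-bit destination is cleared even when the condition is false (the
 * IEM_MC_ELSE branch).  Scalar model with an invented name:
 *
 *     #include <stdbool.h>
 *     #include <stdint.h>
 *
 *     static uint64_t CMov32Sketch(uint64_t uDst64, uint32_t uSrc32, bool fCond)
 *     {
 *         // Writing a 32-bit GPR in 64-bit mode always zeroes bits 63:32,
 *         // whether or not the move itself takes place.
 *         return fCond ? (uint64_t)uSrc32 : (uint64_t)(uint32_t)uDst64;
 *     }
 */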
2698
2699/** Opcode 0x0f 0x40. */
2700FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2701{
2702 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2703 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2704}
2705
2706
2707/** Opcode 0x0f 0x41. */
2708FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2709{
2710 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2711 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2712}
2713
2714
2715/** Opcode 0x0f 0x42. */
2716FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2717{
2718 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2719 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2720}
2721
2722
2723/** Opcode 0x0f 0x43. */
2724FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2725{
2726 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2727 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2728}
2729
2730
2731/** Opcode 0x0f 0x44. */
2732FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2733{
2734 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2735 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2736}
2737
2738
2739/** Opcode 0x0f 0x45. */
2740FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2741{
2742 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2743 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2744}
2745
2746
2747/** Opcode 0x0f 0x46. */
2748FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2749{
2750 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2751 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2752}
2753
2754
2755/** Opcode 0x0f 0x47. */
2756FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2757{
2758 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2759 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2760}
2761
2762
2763/** Opcode 0x0f 0x48. */
2764FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2765{
2766 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2767 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2768}
2769
2770
2771/** Opcode 0x0f 0x49. */
2772FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2773{
2774 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2775 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2776}
2777
2778
2779/** Opcode 0x0f 0x4a. */
2780FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2781{
2782 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2783 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2784}
2785
2786
2787/** Opcode 0x0f 0x4b. */
2788FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2789{
2790 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2791 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2792}
2793
2794
2795/** Opcode 0x0f 0x4c. */
2796FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x4d. */
2804FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x4e. */
2812FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2816}
2817
2818
2819/** Opcode 0x0f 0x4f. */
2820FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2824}
2825
2826#undef CMOV_X
2827
2828/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2829FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2830/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2831FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2832/* Opcode 0xf3 0x0f 0x50 - invalid */
2833/* Opcode 0xf2 0x0f 0x50 - invalid */
2834
2835/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2836FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2837/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2838FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2839/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2840FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2841/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2842FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2843
2844/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2845FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2846/* Opcode 0x66 0x0f 0x52 - invalid */
2847/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2848FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2849/* Opcode 0xf2 0x0f 0x52 - invalid */
2850
2851/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2852FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2853/* Opcode 0x66 0x0f 0x53 - invalid */
2854/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2855FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2856/* Opcode 0xf2 0x0f 0x53 - invalid */
2857
2858/** Opcode 0x0f 0x54 - andps Vps, Wps */
2859FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2860/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2861FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2862/* Opcode 0xf3 0x0f 0x54 - invalid */
2863/* Opcode 0xf2 0x0f 0x54 - invalid */
2864
2865/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2866FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2867/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2868FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2869/* Opcode 0xf3 0x0f 0x55 - invalid */
2870/* Opcode 0xf2 0x0f 0x55 - invalid */
2871
2872/** Opcode 0x0f 0x56 - orps Vps, Wps */
2873FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2874/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2875FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2876/* Opcode 0xf3 0x0f 0x56 - invalid */
2877/* Opcode 0xf2 0x0f 0x56 - invalid */
2878
2879/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2880FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2881/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2882FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2883/* Opcode 0xf3 0x0f 0x57 - invalid */
2884/* Opcode 0xf2 0x0f 0x57 - invalid */
2885
2886/** Opcode 0x0f 0x58 - addps Vps, Wps */
2887FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2888/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2889FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2890/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2891FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2892/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2893FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2894
2895/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2896FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2897/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2898FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2899/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2900FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2901/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2902FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2903
2904/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2905FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2906/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2907FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2908/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2909FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2910/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2911FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2912
2913/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2914FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2915/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2916FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2917/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2918FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2919/* Opcode 0xf2 0x0f 0x5b - invalid */
2920
2921/** Opcode 0x0f 0x5c - subps Vps, Wps */
2922FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2923/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2924FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2925/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2926FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2927/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2928FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2929
2930/** Opcode 0x0f 0x5d - minps Vps, Wps */
2931FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2932/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2933FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2934/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2935FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2936/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2937FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2938
2939/** Opcode 0x0f 0x5e - divps Vps, Wps */
2940FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2941/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2942FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2943/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2944FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2945/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2946FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2947
2948/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2949FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2950/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2951FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2952/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2953FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2954/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2955FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2956
2957/**
2958 * Common worker for MMX instructions on the forms:
2959 *      pxxxx mm1, mm2/mem32
2960 *
2961 * The 2nd operand is the low half of a register, which in the memory case
2962 * means a 32-bit memory access. The result interleaves it with the low
2963 * half of the destination to fill the full 64-bit register.
2964 *
2965 * Exceptions type 4.
2966 */
2967FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2968{
2969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2970 if (!pImpl->pfnU64)
2971 return IEMOP_RAISE_INVALID_OPCODE();
2972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2973 {
2974 /*
2975 * Register, register.
2976 */
2977 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2978 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2980 IEM_MC_BEGIN(2, 0);
2981 IEM_MC_ARG(uint64_t *, pDst, 0);
2982 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2983 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2984 IEM_MC_PREPARE_FPU_USAGE();
2985 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2986 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2987 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2988 IEM_MC_ADVANCE_RIP();
2989 IEM_MC_END();
2990 }
2991 else
2992 {
2993 /*
2994 * Register, memory.
2995 */
2996 IEM_MC_BEGIN(2, 2);
2997 IEM_MC_ARG(uint64_t *, pDst, 0);
2998 IEM_MC_LOCAL(uint32_t, uSrc);
2999 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3001
3002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3004 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3005 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3006
3007 IEM_MC_PREPARE_FPU_USAGE();
3008 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3009 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3010
3011 IEM_MC_ADVANCE_RIP();
3012 IEM_MC_END();
3013 }
3014 return VINF_SUCCESS;
3015}
3016
3017
3018/**
3019 * Common worker for SSE2 instructions on the forms:
3020 *      pxxxx xmm1, xmm2/mem128
3021 *
3022 * The 2nd operand is the low half of a register, which in the memory case
3023 * means a 128-bit aligned access where only the low 64 bits are used. The
3024 * result interleaves it with the low half of the destination register.
3025 *
3026 * Exceptions type 4.
3027 */
3028FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3029{
3030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3031 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3032 {
3033 /*
3034 * Register, register.
3035 */
3036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3037 IEM_MC_BEGIN(2, 0);
3038 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3039 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3040 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3041 IEM_MC_PREPARE_SSE_USAGE();
3042 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3043 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3044 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3045 IEM_MC_ADVANCE_RIP();
3046 IEM_MC_END();
3047 }
3048 else
3049 {
3050 /*
3051 * Register, memory.
3052 */
3053 IEM_MC_BEGIN(2, 2);
3054 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3055 IEM_MC_LOCAL(uint64_t, uSrc);
3056 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3058
3059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3062 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3063
3064 IEM_MC_PREPARE_SSE_USAGE();
3065 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3066 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3067
3068 IEM_MC_ADVANCE_RIP();
3069 IEM_MC_END();
3070 }
3071 return VINF_SUCCESS;
3072}
3073
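/*
 * Illustrative sketch, not part of the emulation: what the LowLow_To_Full
 * workers compute for punpcklbw, i.e. a byte-wise interleave of the low
 * halves of destination and source.  Invented function name:
 *
 *     #include <stdint.h>
 *
 *     static uint64_t PunpcklbwSketch(uint64_t uDst, uint64_t uSrc)
 *     {
 *         uint64_t uResult = 0;
 *         for (unsigned i = 0; i < 4; i++)
 *         {
 *             uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     // even bytes from dst
 *             uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); // odd bytes from src
 *         }
 *         return uResult;
 *     }
 */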
3074
3075/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3076FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3077{
3078 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3079 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3080}
3081
3082/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3083FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3084{
3085 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3086 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3087}
3088
3089/* Opcode 0xf3 0x0f 0x60 - invalid */
3090
3091
3092/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3093FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3094{
3095 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3096 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3097}
3098
3099/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3100FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3101{
3102 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3103 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3104}
3105
3106/* Opcode 0xf3 0x0f 0x61 - invalid */
3107
3108
3109/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3110FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3111{
3112 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3113 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3114}
3115
3116/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3117FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3118{
3119 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3120 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3121}
3122
3123/* Opcode 0xf3 0x0f 0x62 - invalid */
3124
3125
3126
3127/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3128FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3129/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3130FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3131/* Opcode 0xf3 0x0f 0x63 - invalid */
3132
3133/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3134FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3135/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3136FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3137/* Opcode 0xf3 0x0f 0x64 - invalid */
3138
3139/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3140FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3141/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3142FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3143/* Opcode 0xf3 0x0f 0x65 - invalid */
3144
3145/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3146FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3147/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3148FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3149/* Opcode 0xf3 0x0f 0x66 - invalid */
3150
3151/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3152FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3153/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3154FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3155/* Opcode 0xf3 0x0f 0x67 - invalid */
3156
3157
3158/**
3159 * Common worker for MMX instructions on the form:
3160 * pxxxx mm1, mm2/mem64
3161 *
3162 * The 2nd operand is the high half of a register, which in the memory case
3163 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3164 * where it may read the full 128 bits or only the upper 64 bits.
3165 *
3166 * Exceptions type 4.
3167 */
3168FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3169{
3170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3171 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3172 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3173 {
3174 /*
3175 * Register, register.
3176 */
3177 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3178 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3180 IEM_MC_BEGIN(2, 0);
3181 IEM_MC_ARG(uint64_t *, pDst, 0);
3182 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3183 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3184 IEM_MC_PREPARE_FPU_USAGE();
3185 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3186 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3187 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3188 IEM_MC_ADVANCE_RIP();
3189 IEM_MC_END();
3190 }
3191 else
3192 {
3193 /*
3194 * Register, memory.
3195 */
3196 IEM_MC_BEGIN(2, 2);
3197 IEM_MC_ARG(uint64_t *, pDst, 0);
3198 IEM_MC_LOCAL(uint64_t, uSrc);
3199 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3201
3202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3204 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3205 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3206
3207 IEM_MC_PREPARE_FPU_USAGE();
3208 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3209 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3210
3211 IEM_MC_ADVANCE_RIP();
3212 IEM_MC_END();
3213 }
3214 return VINF_SUCCESS;
3215}
3216
3217
3218/**
3219 * Common worker for SSE2 instructions on the form:
3220 * pxxxx xmm1, xmm2/mem128
3221 *
3222 * The 2nd operand is the high half of a register, which in the memory case
3223 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3224 * where it may read the full 128 bits or only the upper 64 bits.
3225 *
3226 * Exceptions type 4.
3227 */
3228FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3229{
3230 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3231 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3232 {
3233 /*
3234 * Register, register.
3235 */
3236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237 IEM_MC_BEGIN(2, 0);
3238 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3239 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3240 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3241 IEM_MC_PREPARE_SSE_USAGE();
3242 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3243 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3244 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3245 IEM_MC_ADVANCE_RIP();
3246 IEM_MC_END();
3247 }
3248 else
3249 {
3250 /*
3251 * Register, memory.
3252 */
3253 IEM_MC_BEGIN(2, 2);
3254 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3255 IEM_MC_LOCAL(RTUINT128U, uSrc);
3256 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3258
3259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3262 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3263
3264 IEM_MC_PREPARE_SSE_USAGE();
3265 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3266 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3267
3268 IEM_MC_ADVANCE_RIP();
3269 IEM_MC_END();
3270 }
3271 return VINF_SUCCESS;
3272}
3273
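/*
 * Illustrative sketch, not part of the emulation: the HighHigh_To_Full workers
 * interleave the high halves instead.  For the dword case (punpckhdq) on MMX
 * this reduces to:
 *
 *     #include <stdint.h>
 *
 *     static uint64_t PunpckhdqSketch(uint64_t uDst, uint64_t uSrc)
 *     {
 *         return (uDst >> 32)                           // low dword  <- high dword of dst
 *              | (uSrc & UINT64_C(0xffffffff00000000)); // high dword <- high dword of src
 *     }
 */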
3274
3275/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3276FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3277{
3278 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3279 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3280}
3281
3282/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3283FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3284{
3285 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3286 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3287}
3288/* Opcode 0xf3 0x0f 0x68 - invalid */
3289
3290
3291/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3292FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3293{
3294 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3295 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3296}
3297
3298/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Hx, Wx */
3299FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3300{
3301 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3302 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3303
3304}
3305/* Opcode 0xf3 0x0f 0x69 - invalid */
3306
3307
3308/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3309FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3310{
3311 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3312 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3313}
3314
3315/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3316FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3317{
3318 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3319 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3320}
3321/* Opcode 0xf3 0x0f 0x6a - invalid */
3322
3323
3324/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3325FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3326/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3327FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3328/* Opcode 0xf3 0x0f 0x6b - invalid */
3329
3330
3331/* Opcode 0x0f 0x6c - invalid */
3332
3333/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3334FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3335{
3336 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3337 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3338}
3339
3340/* Opcode 0xf3 0x0f 0x6c - invalid */
3341/* Opcode 0xf2 0x0f 0x6c - invalid */
3342
3343
3344/* Opcode 0x0f 0x6d - invalid */
3345
3346/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3347FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3348{
3349 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3350 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3351}
3352
3353/* Opcode 0xf3 0x0f 0x6d - invalid */
3354
3355
3356FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3357{
3358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3359 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3360 {
3361 /**
3362 * @opcode 0x6e
3363 * @opcodesub rex.w=1
3364 * @oppfx none
3365 * @opcpuid mmx
3366 * @opgroup og_mmx_datamove
3367 * @opxcpttype 5
3368 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3369 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3370 */
3371 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3373 {
3374 /* MMX, greg64 */
3375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3376 IEM_MC_BEGIN(0, 1);
3377 IEM_MC_LOCAL(uint64_t, u64Tmp);
3378
3379 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3380 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3381
3382 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3383 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3384 IEM_MC_FPU_TO_MMX_MODE();
3385
3386 IEM_MC_ADVANCE_RIP();
3387 IEM_MC_END();
3388 }
3389 else
3390 {
3391 /* MMX, [mem64] */
3392 IEM_MC_BEGIN(0, 2);
3393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3394 IEM_MC_LOCAL(uint64_t, u64Tmp);
3395
3396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3398 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3399 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3400
3401 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3402 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3403 IEM_MC_FPU_TO_MMX_MODE();
3404
3405 IEM_MC_ADVANCE_RIP();
3406 IEM_MC_END();
3407 }
3408 }
3409 else
3410 {
3411 /**
3412 * @opdone
3413 * @opcode 0x6e
3414 * @opcodesub rex.w=0
3415 * @oppfx none
3416 * @opcpuid mmx
3417 * @opgroup og_mmx_datamove
3418 * @opxcpttype 5
3419 * @opfunction iemOp_movd_q_Pd_Ey
3420 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3421 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3422 */
3423 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3425 {
3426 /* MMX, greg */
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428 IEM_MC_BEGIN(0, 1);
3429 IEM_MC_LOCAL(uint64_t, u64Tmp);
3430
3431 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3432 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3433
3434 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3435 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3436 IEM_MC_FPU_TO_MMX_MODE();
3437
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 else
3442 {
3443 /* MMX, [mem] */
3444 IEM_MC_BEGIN(0, 2);
3445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3446 IEM_MC_LOCAL(uint32_t, u32Tmp);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3450 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3452
3453 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3454 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3455 IEM_MC_FPU_TO_MMX_MODE();
3456
3457 IEM_MC_ADVANCE_RIP();
3458 IEM_MC_END();
3459 }
3460 }
3461 return VINF_SUCCESS;
3462}
3463
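/*
 * Illustrative sketch, not part of the emulation: the REX.W split implemented
 * above in scalar form.  With REX.W clear the 32-bit source is zero-extended
 * into the 64-bit MMX register (movd); with REX.W set all 64 bits move (movq).
 * Invented function name:
 *
 *     #include <stdbool.h>
 *     #include <stdint.h>
 *
 *     static uint64_t MovdMovqSketch(uint64_t uSrc, bool fRexW)
 *     {
 *         return fRexW ? uSrc : (uint64_t)(uint32_t)uSrc;
 *     }
 */
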
3464FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3465{
3466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3467 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3468 {
3469 /**
3470 * @opcode 0x6e
3471 * @opcodesub rex.w=1
3472 * @oppfx 0x66
3473 * @opcpuid sse2
3474 * @opgroup og_sse2_simdint_datamove
3475 * @opxcpttype 5
3476 * @optest 64-bit / op1=1 op2=2 -> op1=2
3477 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3478 */
3479 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3480 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3481 {
3482 /* XMM, greg64 */
3483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3484 IEM_MC_BEGIN(0, 1);
3485 IEM_MC_LOCAL(uint64_t, u64Tmp);
3486
3487 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3489
3490 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3491 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3492
3493 IEM_MC_ADVANCE_RIP();
3494 IEM_MC_END();
3495 }
3496 else
3497 {
3498 /* XMM, [mem64] */
3499 IEM_MC_BEGIN(0, 2);
3500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3501 IEM_MC_LOCAL(uint64_t, u64Tmp);
3502
3503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3505 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3507
3508 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3509 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3510
3511 IEM_MC_ADVANCE_RIP();
3512 IEM_MC_END();
3513 }
3514 }
3515 else
3516 {
3517 /**
3518 * @opdone
3519 * @opcode 0x6e
3520 * @opcodesub rex.w=0
3521 * @oppfx 0x66
3522 * @opcpuid sse2
3523 * @opgroup og_sse2_simdint_datamove
3524 * @opxcpttype 5
3525 * @opfunction iemOp_movd_q_Vy_Ey
3526 * @optest op1=1 op2=2 -> op1=2
3527 * @optest op1=0 op2=-42 -> op1=-42
3528 */
3529 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3530 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3531 {
3532 /* XMM, greg32 */
3533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3534 IEM_MC_BEGIN(0, 1);
3535 IEM_MC_LOCAL(uint32_t, u32Tmp);
3536
3537 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3538 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3539
3540 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3541 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3542
3543 IEM_MC_ADVANCE_RIP();
3544 IEM_MC_END();
3545 }
3546 else
3547 {
3548 /* XMM, [mem32] */
3549 IEM_MC_BEGIN(0, 2);
3550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3551 IEM_MC_LOCAL(uint32_t, u32Tmp);
3552
3553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3557
3558 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3559 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3560
3561 IEM_MC_ADVANCE_RIP();
3562 IEM_MC_END();
3563 }
3564 }
3565 return VINF_SUCCESS;
3566}
3567
3568/* Opcode 0xf3 0x0f 0x6e - invalid */
3569
3570
3571/**
3572 * @opcode 0x6f
3573 * @oppfx none
3574 * @opcpuid mmx
3575 * @opgroup og_mmx_datamove
3576 * @opxcpttype 5
3577 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3578 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3579 */
3580FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3581{
3582 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3584 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3585 {
3586 /*
3587 * Register, register.
3588 */
3589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3590 IEM_MC_BEGIN(0, 1);
3591 IEM_MC_LOCAL(uint64_t, u64Tmp);
3592
3593 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3594 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3595
3596 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3597 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3598 IEM_MC_FPU_TO_MMX_MODE();
3599
3600 IEM_MC_ADVANCE_RIP();
3601 IEM_MC_END();
3602 }
3603 else
3604 {
3605 /*
3606 * Register, memory.
3607 */
3608 IEM_MC_BEGIN(0, 2);
3609 IEM_MC_LOCAL(uint64_t, u64Tmp);
3610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3611
3612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3615 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3616
3617 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3618 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3619 IEM_MC_FPU_TO_MMX_MODE();
3620
3621 IEM_MC_ADVANCE_RIP();
3622 IEM_MC_END();
3623 }
3624 return VINF_SUCCESS;
3625}
3626
3627/**
3628 * @opcode 0x6f
3629 * @oppfx 0x66
3630 * @opcpuid sse2
3631 * @opgroup og_sse2_simdint_datamove
3632 * @opxcpttype 1
3633 * @optest op1=1 op2=2 -> op1=2
3634 * @optest op1=0 op2=-42 -> op1=-42
3635 */
3636FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3637{
3638 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3641 {
3642 /*
3643 * Register, register.
3644 */
3645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3646 IEM_MC_BEGIN(0, 0);
3647
3648 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3649 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3650
3651 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3652 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3653 IEM_MC_ADVANCE_RIP();
3654 IEM_MC_END();
3655 }
3656 else
3657 {
3658 /*
3659 * Register, memory.
3660 */
3661 IEM_MC_BEGIN(0, 2);
3662 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3664
3665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3667 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3669
3670 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3671 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3672
3673 IEM_MC_ADVANCE_RIP();
3674 IEM_MC_END();
3675 }
3676 return VINF_SUCCESS;
3677}
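
/*
 * For reference: how the ModRM byte and the REX bits select the XMM registers
 * in the register forms above, with illustrative values. That uRexReg/uRexB
 * hold the REX.R/REX.B bits pre-shifted to bit 3 is inferred here from how
 * they are OR'ed into the 3-bit ModRM fields:
 *
 * @code
 *  uint8_t const  bRm     = 0xd1; // mod=3, reg=2, rm=1
 *  uint8_t const  uRexReg = 8;    // REX.R set, stored pre-shifted to bit 3
 *  uint8_t const  uRexB   = 0;    // REX.B clear
 *  unsigned const iRegDst = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | uRexReg; // = 10 => xmm10
 *  unsigned const iRegSrc = (bRm & X86_MODRM_RM_MASK) | uRexB;                              // = 1  => xmm1
 * @endcode
 */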
3678
3679/**
3680 * @opcode 0x6f
3681 * @oppfx 0xf3
3682 * @opcpuid sse2
3683 * @opgroup og_sse2_simdint_datamove
3684 * @opxcpttype 4UA
3685 * @optest op1=1 op2=2 -> op1=2
3686 * @optest op1=0 op2=-42 -> op1=-42
3687 */
3688FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3689{
3690 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3693 {
3694 /*
3695 * Register, register.
3696 */
3697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3698 IEM_MC_BEGIN(0, 0);
3699 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3700 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3701 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3702 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 }
3706 else
3707 {
3708 /*
3709 * Register, memory.
3710 */
3711 IEM_MC_BEGIN(0, 2);
3712 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3714
3715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3719 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3720 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3721
3722 IEM_MC_ADVANCE_RIP();
3723 IEM_MC_END();
3724 }
3725 return VINF_SUCCESS;
3726}
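
/*
 * The only difference between the movdqa and movdqu memory forms above is the
 * aligned fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE adds a 16-byte alignment
 * check along the lines of the sketch below (illustrative only), whereas the
 * plain IEM_MC_FETCH_MEM_U128 accepts any address:
 *
 * @code
 *  if (GCPtrEffSrc & 15)
 *      return iemRaiseGeneralProtectionFault0(pVCpu);
 * @endcode
 */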
3727
3728
3729/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3730FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3731{
3732 IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3734 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3735 {
3736 /*
3737 * Register, register.
3738 */
3739 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741
3742 IEM_MC_BEGIN(3, 0);
3743 IEM_MC_ARG(uint64_t *, pDst, 0);
3744 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3746 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3747 IEM_MC_PREPARE_FPU_USAGE();
3748 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3749 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3750 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 }
3754 else
3755 {
3756 /*
3757 * Register, memory.
3758 */
3759 IEM_MC_BEGIN(3, 2);
3760 IEM_MC_ARG(uint64_t *, pDst, 0);
3761 IEM_MC_LOCAL(uint64_t, uSrc);
3762 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3764
3765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3766 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3767 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3769 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3770
3771 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3772 IEM_MC_PREPARE_FPU_USAGE();
3773 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3774 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3775
3776 IEM_MC_ADVANCE_RIP();
3777 IEM_MC_END();
3778 }
3779 return VINF_SUCCESS;
3780}
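
/*
 * Semantics sketch for the pshufw helper called above: each 2-bit field of
 * the immediate selects one source word for the corresponding destination
 * word. Illustrative C only, the real iemAImpl_pshufw worker is provided
 * elsewhere:
 *
 * @code
 *  static void pshufwSketch(uint64_t *puDst, uint64_t const *puSrc, uint8_t bImm)
 *  {
 *      uint64_t const uSrc = *puSrc;
 *      uint64_t       uDst = 0;
 *      for (unsigned iWord = 0; iWord < 4; iWord++)
 *      {
 *          unsigned const iSel = (bImm >> (iWord * 2)) & 3;
 *          uDst |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
 *      }
 *      *puDst = uDst;
 *  }
 * @endcode
 */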
3781
3782/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3783FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3784{
3785 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3788 {
3789 /*
3790 * Register, register.
3791 */
3792 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794
3795 IEM_MC_BEGIN(3, 0);
3796 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3797 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3799 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3800 IEM_MC_PREPARE_SSE_USAGE();
3801 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3802 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3803 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3804 IEM_MC_ADVANCE_RIP();
3805 IEM_MC_END();
3806 }
3807 else
3808 {
3809 /*
3810 * Register, memory.
3811 */
3812 IEM_MC_BEGIN(3, 2);
3813 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3814 IEM_MC_LOCAL(RTUINT128U, uSrc);
3815 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3817
3818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3819 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3820 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3822 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3823
3824 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3825 IEM_MC_PREPARE_SSE_USAGE();
3826 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3827 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3828
3829 IEM_MC_ADVANCE_RIP();
3830 IEM_MC_END();
3831 }
3832 return VINF_SUCCESS;
3833}
3834
3835/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3836FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3837{
3838 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3841 {
3842 /*
3843 * Register, register.
3844 */
3845 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847
3848 IEM_MC_BEGIN(3, 0);
3849 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3850 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3852 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3853 IEM_MC_PREPARE_SSE_USAGE();
3854 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3855 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3856 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3857 IEM_MC_ADVANCE_RIP();
3858 IEM_MC_END();
3859 }
3860 else
3861 {
3862 /*
3863 * Register, memory.
3864 */
3865 IEM_MC_BEGIN(3, 2);
3866 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3867 IEM_MC_LOCAL(RTUINT128U, uSrc);
3868 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3870
3871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3872 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3873 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3876
3877 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3878 IEM_MC_PREPARE_SSE_USAGE();
3879 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3880 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3881
3882 IEM_MC_ADVANCE_RIP();
3883 IEM_MC_END();
3884 }
3885 return VINF_SUCCESS;
3886}
3887
3888/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3889FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3890{
3891 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3893 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3894 {
3895 /*
3896 * Register, register.
3897 */
3898 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3900
3901 IEM_MC_BEGIN(3, 0);
3902 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3903 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3904 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3905 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3906 IEM_MC_PREPARE_SSE_USAGE();
3907 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3908 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3909 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3910 IEM_MC_ADVANCE_RIP();
3911 IEM_MC_END();
3912 }
3913 else
3914 {
3915 /*
3916 * Register, memory.
3917 */
3918 IEM_MC_BEGIN(3, 2);
3919 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3920 IEM_MC_LOCAL(RTUINT128U, uSrc);
3921 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3923
3924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3925 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3926 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3928 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3929
3930 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3931 IEM_MC_PREPARE_SSE_USAGE();
3932 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3933 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3934
3935 IEM_MC_ADVANCE_RIP();
3936 IEM_MC_END();
3937 }
3938 return VINF_SUCCESS;
3939}
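
/*
 * pshufhw and pshuflw reuse the same word-select scheme as pshufw, but only
 * on one half of the XMM register, copying the other half unchanged. A
 * pshufhw sketch (illustrative only, not the real worker):
 *
 * @code
 *  static void pshufhwSketch(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
 *  {
 *      RTUINT128U     uResult;
 *      uint64_t const uHi    = puSrc->au64[1];
 *      uint64_t       uDstHi = 0;
 *      uResult.au64[0] = puSrc->au64[0];   // low qword copied as-is
 *      for (unsigned iWord = 0; iWord < 4; iWord++)
 *      {
 *          unsigned const iSel = (bImm >> (iWord * 2)) & 3;
 *          uDstHi |= ((uHi >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
 *      }
 *      uResult.au64[1] = uDstHi;
 *      *puDst = uResult;
 *  }
 * @endcode
 *
 * pshuflw is the mirror image: it shuffles the low four words and copies the
 * high qword unchanged.
 */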
3940
3941
3942/** Opcode 0x0f 0x71 11/2. */
3943FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3944
3945/** Opcode 0x66 0x0f 0x71 11/2. */
3946FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3947
3948/** Opcode 0x0f 0x71 11/4. */
3949FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3950
3951/** Opcode 0x66 0x0f 0x71 11/4. */
3952FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3953
3954/** Opcode 0x0f 0x71 11/6. */
3955FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3956
3957/** Opcode 0x66 0x0f 0x71 11/6. */
3958FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3959
3960
3961/**
3962 * Group 12 jump table for register variant.
3963 */
3964IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3965{
3966 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3967 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3968 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3969 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3970 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3971 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3972 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3973 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3974};
3975AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3976
3977
3978/** Opcode 0x0f 0x71. */
3979FNIEMOP_DEF(iemOp_Grp12)
3980{
3981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3982 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3983 /* register, register */
3984 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3985 + pVCpu->iem.s.idxPrefix], bRm);
3986 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3987}
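
/*
 * Worked example of the jump table indexing above (groups 13 and 14 below use
 * the same scheme): each /reg row has four columns, one per mandatory prefix
 * in the order none, 0x66, 0xf3, 0xf2, matching idxPrefix. So 66 0F 71 /2 ib
 * (psrlw xmm, imm8) dispatches as:
 *
 * @code
 *  unsigned const iReg   = 2;              // ModRM.reg
 *  unsigned const iEntry = iReg * 4 + 1;   // idxPrefix 1 = 0x66 => entry 9
 *  // g_apfnGroup12RegReg[9] == iemOp_Grp12_psrlw_Ux_Ib
 * @endcode
 */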
3988
3989
3990/** Opcode 0x0f 0x72 11/2. */
3991FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3992
3993/** Opcode 0x66 0x0f 0x72 11/2. */
3994FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3995
3996/** Opcode 0x0f 0x72 11/4. */
3997FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3998
3999/** Opcode 0x66 0x0f 0x72 11/4. */
4000FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4001
4002/** Opcode 0x0f 0x72 11/6. */
4003FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4004
4005/** Opcode 0x66 0x0f 0x72 11/6. */
4006FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4007
4008
4009/**
4010 * Group 13 jump table for register variant.
4011 */
4012IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4013{
4014 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4015 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4016 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4017 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4018 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4019 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4020 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4021 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4022};
4023AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4024
4025/** Opcode 0x0f 0x72. */
4026FNIEMOP_DEF(iemOp_Grp13)
4027{
4028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4029 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4030 /* register, register */
4031 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4032 + pVCpu->iem.s.idxPrefix], bRm);
4033 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4034}
4035
4036
4037/** Opcode 0x0f 0x73 11/2. */
4038FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4039
4040/** Opcode 0x66 0x0f 0x73 11/2. */
4041FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4042
4043/** Opcode 0x66 0x0f 0x73 11/3. */
4044FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4045
4046/** Opcode 0x0f 0x73 11/6. */
4047FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4048
4049/** Opcode 0x66 0x0f 0x73 11/6. */
4050FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4051
4052/** Opcode 0x66 0x0f 0x73 11/7. */
4053FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4054
4055/**
4056 * Group 14 jump table for register variant.
4057 */
4058IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4059{
4060 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4061 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4062 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4063 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4064 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4065 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4066 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4067 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4068};
4069AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4070
4071
4072/** Opcode 0x0f 0x73. */
4073FNIEMOP_DEF(iemOp_Grp14)
4074{
4075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4077 /* register, register */
4078 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4079 + pVCpu->iem.s.idxPrefix], bRm);
4080 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4081}
4082
4083
4084/**
4085 * Common worker for MMX instructions on the form:
4086 * pxxx mm1, mm2/mem64
4087 */
4088FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4089{
4090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4092 {
4093 /*
4094 * Register, register.
4095 */
4096 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4097 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4099 IEM_MC_BEGIN(2, 0);
4100 IEM_MC_ARG(uint64_t *, pDst, 0);
4101 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4102 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4103 IEM_MC_PREPARE_FPU_USAGE();
4104 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4105 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4106 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4107 IEM_MC_ADVANCE_RIP();
4108 IEM_MC_END();
4109 }
4110 else
4111 {
4112 /*
4113 * Register, memory.
4114 */
4115 IEM_MC_BEGIN(2, 2);
4116 IEM_MC_ARG(uint64_t *, pDst, 0);
4117 IEM_MC_LOCAL(uint64_t, uSrc);
4118 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4120
4121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4123 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4124 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4125
4126 IEM_MC_PREPARE_FPU_USAGE();
4127 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4128 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4129
4130 IEM_MC_ADVANCE_RIP();
4131 IEM_MC_END();
4132 }
4133 return VINF_SUCCESS;
4134}
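
/*
 * What a pfnU64 worker computes, using pcmpeqb as the example: each byte lane
 * of the destination becomes 0xff when the two lanes are equal and 0x00
 * otherwise. Illustrative C only, the real pfnU64/pfnU128 workers are
 * provided elsewhere:
 *
 * @code
 *  static void pcmpeqbSketchU64(uint64_t *puDst, uint64_t const *puSrc)
 *  {
 *      uint64_t const uDst    = *puDst;
 *      uint64_t const uSrc    = *puSrc;
 *      uint64_t       uResult = 0;
 *      for (unsigned iByte = 0; iByte < 8; iByte++)
 *          if ((uint8_t)(uDst >> (iByte * 8)) == (uint8_t)(uSrc >> (iByte * 8)))
 *              uResult |= UINT64_C(0xff) << (iByte * 8);
 *      *puDst = uResult;
 *  }
 * @endcode
 */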
4135
4136
4137/**
4138 * Common worker for SSE2 instructions on the forms:
4139 * pxxx xmm1, xmm2/mem128
4140 *
4141 * Proper alignment of the 128-bit operand is enforced.
4142 * Exceptions type 4. SSE2 cpuid checks.
4143 */
4144FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4145{
4146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4148 {
4149 /*
4150 * Register, register.
4151 */
4152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4153 IEM_MC_BEGIN(2, 0);
4154 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4155 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4156 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4157 IEM_MC_PREPARE_SSE_USAGE();
4158 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4159 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4160 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4161 IEM_MC_ADVANCE_RIP();
4162 IEM_MC_END();
4163 }
4164 else
4165 {
4166 /*
4167 * Register, memory.
4168 */
4169 IEM_MC_BEGIN(2, 2);
4170 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4171 IEM_MC_LOCAL(RTUINT128U, uSrc);
4172 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4174
4175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4177 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4178 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4179
4180 IEM_MC_PREPARE_SSE_USAGE();
4181 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4182 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4183
4184 IEM_MC_ADVANCE_RIP();
4185 IEM_MC_END();
4186 }
4187 return VINF_SUCCESS;
4188}
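
/*
 * The corresponding pfnU128 worker is simply the 64-bit operation applied to
 * both halves of the RTUINT128U operand (sketch, building on the pcmpeqb
 * example above):
 *
 * @code
 *  static void pcmpeqbSketchU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
 *  {
 *      pcmpeqbSketchU64(&puDst->au64[0], &puSrc->au64[0]);
 *      pcmpeqbSketchU64(&puDst->au64[1], &puSrc->au64[1]);
 *  }
 * @endcode
 */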
4189
4190
4191/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4192FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4193{
4194 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4195 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4196}
4197
4198/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4199FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4200{
4201 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4202 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4203}
4204
4205/* Opcode 0xf3 0x0f 0x74 - invalid */
4206/* Opcode 0xf2 0x0f 0x74 - invalid */
4207
4208
4209/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4210FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4211{
4212 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4213 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4214}
4215
4216/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4217FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4218{
4219 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4220 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4221}
4222
4223/* Opcode 0xf3 0x0f 0x75 - invalid */
4224/* Opcode 0xf2 0x0f 0x75 - invalid */
4225
4226
4227/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4228FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4229{
4230 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4231 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4232}
4233
4234/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4235FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4236{
4237 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4238 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4239}
4240
4241/* Opcode 0xf3 0x0f 0x76 - invalid */
4242/* Opcode 0xf2 0x0f 0x76 - invalid */
4243
4244
4245/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4246FNIEMOP_DEF(iemOp_emms)
4247{
4248 IEMOP_MNEMONIC(emms, "emms");
4249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4250
4251 IEM_MC_BEGIN(0,0);
4252 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4253 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4254 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4255 IEM_MC_FPU_FROM_MMX_MODE();
4256 IEM_MC_ADVANCE_RIP();
4257 IEM_MC_END();
4258 return VINF_SUCCESS;
4259}
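
/*
 * Architecturally, emms just marks all eight x87 registers empty again. In
 * the abridged FXSAVE tag encoding that amounts to the sketch below; that
 * IEM_MC_FPU_FROM_MMX_MODE boils down to this for emms is an assumption of
 * the illustration:
 *
 * @code
 *  static void emmsSketch(PX86FXSTATE pFpuCtx)
 *  {
 *      pFpuCtx->FTW = 0;   // abridged tags: bit clear = register empty
 *  }
 * @endcode
 */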
4260
4261/* Opcode 0x66 0x0f 0x77 - invalid */
4262/* Opcode 0xf3 0x0f 0x77 - invalid */
4263/* Opcode 0xf2 0x0f 0x77 - invalid */
4264
4265/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4266FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4267/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4268FNIEMOP_STUB(iemOp_AmdGrp17);
4269/* Opcode 0xf3 0x0f 0x78 - invalid */
4270/* Opcode 0xf2 0x0f 0x78 - invalid */
4271
4272/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4273FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4274/* Opcode 0x66 0x0f 0x79 - invalid */
4275/* Opcode 0xf3 0x0f 0x79 - invalid */
4276/* Opcode 0xf2 0x0f 0x79 - invalid */
4277
4278/* Opcode 0x0f 0x7a - invalid */
4279/* Opcode 0x66 0x0f 0x7a - invalid */
4280/* Opcode 0xf3 0x0f 0x7a - invalid */
4281/* Opcode 0xf2 0x0f 0x7a - invalid */
4282
4283/* Opcode 0x0f 0x7b - invalid */
4284/* Opcode 0x66 0x0f 0x7b - invalid */
4285/* Opcode 0xf3 0x0f 0x7b - invalid */
4286/* Opcode 0xf2 0x0f 0x7b - invalid */
4287
4288/* Opcode 0x0f 0x7c - invalid */
4289/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4290FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4291/* Opcode 0xf3 0x0f 0x7c - invalid */
4292/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4293FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4294
4295/* Opcode 0x0f 0x7d - invalid */
4296/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4297FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4298/* Opcode 0xf3 0x0f 0x7d - invalid */
4299/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4300FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4301
4302
4303/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4304FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4305{
4306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4307 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4308 {
4309 /**
4310 * @opcode 0x7e
4311 * @opcodesub rex.w=1
4312 * @oppfx none
4313 * @opcpuid mmx
4314 * @opgroup og_mmx_datamove
4315 * @opxcpttype 5
4316 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4317 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4318 */
4319 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4321 {
4322 /* greg64, MMX */
4323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4324 IEM_MC_BEGIN(0, 1);
4325 IEM_MC_LOCAL(uint64_t, u64Tmp);
4326
4327 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4328 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4329
4330 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4331 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4332 IEM_MC_FPU_TO_MMX_MODE();
4333
4334 IEM_MC_ADVANCE_RIP();
4335 IEM_MC_END();
4336 }
4337 else
4338 {
4339 /* [mem64], MMX */
4340 IEM_MC_BEGIN(0, 2);
4341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4342 IEM_MC_LOCAL(uint64_t, u64Tmp);
4343
4344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4346 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4347 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4348
4349 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4350 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4351 IEM_MC_FPU_TO_MMX_MODE();
4352
4353 IEM_MC_ADVANCE_RIP();
4354 IEM_MC_END();
4355 }
4356 }
4357 else
4358 {
4359 /**
4360 * @opdone
4361 * @opcode 0x7e
4362 * @opcodesub rex.w=0
4363 * @oppfx none
4364 * @opcpuid mmx
4365 * @opgroup og_mmx_datamove
4366 * @opxcpttype 5
4367 * @opfunction iemOp_movd_q_Ey_Pd
4368 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4369 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4370 */
4371 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4373 {
4374 /* greg32, MMX */
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4376 IEM_MC_BEGIN(0, 1);
4377 IEM_MC_LOCAL(uint32_t, u32Tmp);
4378
4379 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4380 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4381
4382 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4383 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4384 IEM_MC_FPU_TO_MMX_MODE();
4385
4386 IEM_MC_ADVANCE_RIP();
4387 IEM_MC_END();
4388 }
4389 else
4390 {
4391 /* [mem32], MMX */
4392 IEM_MC_BEGIN(0, 2);
4393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4394 IEM_MC_LOCAL(uint32_t, u32Tmp);
4395
4396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4399 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4400
4401 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4402 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4403 IEM_MC_FPU_TO_MMX_MODE();
4404
4405 IEM_MC_ADVANCE_RIP();
4406 IEM_MC_END();
4407 }
4408 }
4409 return VINF_SUCCESS;
4411}
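
/*
 * The rex.w=0 register form above relies on the usual 64-bit mode rule that
 * IEM_MC_STORE_GREG_U32 implements: a 32-bit GPR write clears the upper half
 * of the register, i.e. roughly (sketch):
 *
 * @code
 *  static void storeGreg32Sketch(uint64_t *puGstReg, uint32_t u32Value)
 *  {
 *      *puGstReg = u32Value;   // upper 32 bits implicitly zeroed
 *  }
 * @endcode
 */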
4412
4413
4414FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4415{
4416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4418 {
4419 /**
4420 * @opcode 0x7e
4421 * @opcodesub rex.w=1
4422 * @oppfx 0x66
4423 * @opcpuid sse2
4424 * @opgroup og_sse2_simdint_datamove
4425 * @opxcpttype 5
4426 * @optest 64-bit / op1=1 op2=2 -> op1=2
4427 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4428 */
4429 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4430 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4431 {
4432 /* greg64, XMM */
4433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4434 IEM_MC_BEGIN(0, 1);
4435 IEM_MC_LOCAL(uint64_t, u64Tmp);
4436
4437 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4439
4440 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4441 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4442
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 }
4446 else
4447 {
4448 /* [mem64], XMM */
4449 IEM_MC_BEGIN(0, 2);
4450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4451 IEM_MC_LOCAL(uint64_t, u64Tmp);
4452
4453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4455 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4456 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4457
4458 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4459 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4460
4461 IEM_MC_ADVANCE_RIP();
4462 IEM_MC_END();
4463 }
4464 }
4465 else
4466 {
4467 /**
4468 * @opdone
4469 * @opcode 0x7e
4470 * @opcodesub rex.w=0
4471 * @oppfx 0x66
4472 * @opcpuid sse2
4473 * @opgroup og_sse2_simdint_datamove
4474 * @opxcpttype 5
4475 * @opfunction iemOp_movd_q_Ey_Vy
4476 * @optest op1=1 op2=2 -> op1=2
4477 * @optest op1=0 op2=-42 -> op1=-42
4478 */
4479 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4480 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4481 {
4482 /* greg32, XMM */
4483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4484 IEM_MC_BEGIN(0, 1);
4485 IEM_MC_LOCAL(uint32_t, u32Tmp);
4486
4487 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4489
4490 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4491 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4492
4493 IEM_MC_ADVANCE_RIP();
4494 IEM_MC_END();
4495 }
4496 else
4497 {
4498 /* [mem32], XMM */
4499 IEM_MC_BEGIN(0, 2);
4500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4501 IEM_MC_LOCAL(uint32_t, u32Tmp);
4502
4503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4505 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4507
4508 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4509 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4510
4511 IEM_MC_ADVANCE_RIP();
4512 IEM_MC_END();
4513 }
4514 }
4515 return VINF_SUCCESS;
4517}
4518
4519/**
4520 * @opcode 0x7e
4521 * @oppfx 0xf3
4522 * @opcpuid sse2
4523 * @opgroup og_sse2_pcksclr_datamove
4524 * @opxcpttype none
4525 * @optest op1=1 op2=2 -> op1=2
4526 * @optest op1=0 op2=-42 -> op1=-42
4527 */
4528FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4529{
4530 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4532 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4533 {
4534 /*
4535 * Register, register.
4536 */
4537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4538 IEM_MC_BEGIN(0, 2);
4539 IEM_MC_LOCAL(uint64_t, uSrc);
4540
4541 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4543
4544 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4545 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4546
4547 IEM_MC_ADVANCE_RIP();
4548 IEM_MC_END();
4549 }
4550 else
4551 {
4552 /*
4553 * Register, memory.
4554 */
4555 IEM_MC_BEGIN(0, 2);
4556 IEM_MC_LOCAL(uint64_t, uSrc);
4557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4558
4559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4561 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4562 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4563
4564 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4565 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4566
4567 IEM_MC_ADVANCE_RIP();
4568 IEM_MC_END();
4569 }
4570 return VINF_SUCCESS;
4571}
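
/*
 * The _ZX_U128 store used above writes the low qword and zeroes the high one,
 * per the movq Vq,Wq semantics, i.e. (sketch):
 *
 * @code
 *  static void storeXregU64ZxU128Sketch(PRTUINT128U puXReg, uint64_t uSrc)
 *  {
 *      puXReg->au64[0] = uSrc;
 *      puXReg->au64[1] = 0;    // high qword zeroed
 *  }
 * @endcode
 */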
4572
4573/* Opcode 0xf2 0x0f 0x7e - invalid */
4574
4575
4576/** Opcode 0x0f 0x7f - movq Qq, Pq */
4577FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4578{
4579 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4581 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4582 {
4583 /*
4584 * Register, register.
4585 */
4586 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4587 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4589 IEM_MC_BEGIN(0, 1);
4590 IEM_MC_LOCAL(uint64_t, u64Tmp);
4591 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4592 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4593 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4594 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
 IEM_MC_FPU_TO_MMX_MODE();
4595 IEM_MC_ADVANCE_RIP();
4596 IEM_MC_END();
4597 }
4598 else
4599 {
4600 /*
4601 * Memory, register.
4602 */
4603 IEM_MC_BEGIN(0, 2);
4604 IEM_MC_LOCAL(uint64_t, u64Tmp);
4605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4606
4607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4609 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4610 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4611
4612 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4613 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4614
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 }
4618 return VINF_SUCCESS;
4619}
4620
4621/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4622FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4623{
4624 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4626 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4627 {
4628 /*
4629 * Register, register.
4630 */
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 IEM_MC_BEGIN(0, 0);
4633 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4634 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4635 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4636 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4637 IEM_MC_ADVANCE_RIP();
4638 IEM_MC_END();
4639 }
4640 else
4641 {
4642 /*
4643 * Memory, register.
4644 */
4645 IEM_MC_BEGIN(0, 2);
4646 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4648
4649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4653
4654 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4655 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4656
4657 IEM_MC_ADVANCE_RIP();
4658 IEM_MC_END();
4659 }
4660 return VINF_SUCCESS;
4661}
4662
4663/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4664FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4665{
4666 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4669 {
4670 /*
4671 * Register, register.
4672 */
4673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4674 IEM_MC_BEGIN(0, 0);
4675 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4676 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4677 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4678 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4679 IEM_MC_ADVANCE_RIP();
4680 IEM_MC_END();
4681 }
4682 else
4683 {
4684 /*
4685 * Memory, register.
4686 */
4687 IEM_MC_BEGIN(0, 2);
4688 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4690
4691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4693 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4694 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4695
4696 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4697 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4698
4699 IEM_MC_ADVANCE_RIP();
4700 IEM_MC_END();
4701 }
4702 return VINF_SUCCESS;
4703}
4704
4705/* Opcode 0xf2 0x0f 0x7f - invalid */
4706
4707
4708
4709/** Opcode 0x0f 0x80. */
4710FNIEMOP_DEF(iemOp_jo_Jv)
4711{
4712 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4713 IEMOP_HLP_MIN_386();
4714 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4715 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4716 {
4717 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4719
4720 IEM_MC_BEGIN(0, 0);
4721 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4722 IEM_MC_REL_JMP_S16(i16Imm);
4723 } IEM_MC_ELSE() {
4724 IEM_MC_ADVANCE_RIP();
4725 } IEM_MC_ENDIF();
4726 IEM_MC_END();
4727 }
4728 else
4729 {
4730 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4732
4733 IEM_MC_BEGIN(0, 0);
4734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4735 IEM_MC_REL_JMP_S32(i32Imm);
4736 } IEM_MC_ELSE() {
4737 IEM_MC_ADVANCE_RIP();
4738 } IEM_MC_ENDIF();
4739 IEM_MC_END();
4740 }
4741 return VINF_SUCCESS;
4742}
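
/*
 * The two branches above differ only in immediate width and IP truncation:
 * with a 16-bit operand size the target wraps to 16 bits, otherwise the full
 * sign-extended displacement is applied. Sketch of the taken 16-bit path,
 * assuming uRipNext already points past the instruction:
 *
 * @code
 *  static uint64_t relJmpS16Sketch(uint64_t uRipNext, int16_t i16Imm)
 *  {
 *      return (uint16_t)(uRipNext + i16Imm);  // 16-bit operand size wraps IP
 *  }
 * @endcode
 */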
4743
4744
4745/** Opcode 0x0f 0x81. */
4746FNIEMOP_DEF(iemOp_jno_Jv)
4747{
4748 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4749 IEMOP_HLP_MIN_386();
4750 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4751 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4752 {
4753 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4755
4756 IEM_MC_BEGIN(0, 0);
4757 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4758 IEM_MC_ADVANCE_RIP();
4759 } IEM_MC_ELSE() {
4760 IEM_MC_REL_JMP_S16(i16Imm);
4761 } IEM_MC_ENDIF();
4762 IEM_MC_END();
4763 }
4764 else
4765 {
4766 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768
4769 IEM_MC_BEGIN(0, 0);
4770 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4771 IEM_MC_ADVANCE_RIP();
4772 } IEM_MC_ELSE() {
4773 IEM_MC_REL_JMP_S32(i32Imm);
4774 } IEM_MC_ENDIF();
4775 IEM_MC_END();
4776 }
4777 return VINF_SUCCESS;
4778}
4779
4780
4781/** Opcode 0x0f 0x82. */
4782FNIEMOP_DEF(iemOp_jc_Jv)
4783{
4784 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4785 IEMOP_HLP_MIN_386();
4786 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4787 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4788 {
4789 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4791
4792 IEM_MC_BEGIN(0, 0);
4793 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4794 IEM_MC_REL_JMP_S16(i16Imm);
4795 } IEM_MC_ELSE() {
4796 IEM_MC_ADVANCE_RIP();
4797 } IEM_MC_ENDIF();
4798 IEM_MC_END();
4799 }
4800 else
4801 {
4802 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804
4805 IEM_MC_BEGIN(0, 0);
4806 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4807 IEM_MC_REL_JMP_S32(i32Imm);
4808 } IEM_MC_ELSE() {
4809 IEM_MC_ADVANCE_RIP();
4810 } IEM_MC_ENDIF();
4811 IEM_MC_END();
4812 }
4813 return VINF_SUCCESS;
4814}
4815
4816
4817/** Opcode 0x0f 0x83. */
4818FNIEMOP_DEF(iemOp_jnc_Jv)
4819{
4820 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4821 IEMOP_HLP_MIN_386();
4822 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4823 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4824 {
4825 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827
4828 IEM_MC_BEGIN(0, 0);
4829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4830 IEM_MC_ADVANCE_RIP();
4831 } IEM_MC_ELSE() {
4832 IEM_MC_REL_JMP_S16(i16Imm);
4833 } IEM_MC_ENDIF();
4834 IEM_MC_END();
4835 }
4836 else
4837 {
4838 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840
4841 IEM_MC_BEGIN(0, 0);
4842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4843 IEM_MC_ADVANCE_RIP();
4844 } IEM_MC_ELSE() {
4845 IEM_MC_REL_JMP_S32(i32Imm);
4846 } IEM_MC_ENDIF();
4847 IEM_MC_END();
4848 }
4849 return VINF_SUCCESS;
4850}
4851
4852
4853/** Opcode 0x0f 0x84. */
4854FNIEMOP_DEF(iemOp_je_Jv)
4855{
4856 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4857 IEMOP_HLP_MIN_386();
4858 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4859 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4860 {
4861 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4863
4864 IEM_MC_BEGIN(0, 0);
4865 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4866 IEM_MC_REL_JMP_S16(i16Imm);
4867 } IEM_MC_ELSE() {
4868 IEM_MC_ADVANCE_RIP();
4869 } IEM_MC_ENDIF();
4870 IEM_MC_END();
4871 }
4872 else
4873 {
4874 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4876
4877 IEM_MC_BEGIN(0, 0);
4878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4879 IEM_MC_REL_JMP_S32(i32Imm);
4880 } IEM_MC_ELSE() {
4881 IEM_MC_ADVANCE_RIP();
4882 } IEM_MC_ENDIF();
4883 IEM_MC_END();
4884 }
4885 return VINF_SUCCESS;
4886}
4887
4888
4889/** Opcode 0x0f 0x85. */
4890FNIEMOP_DEF(iemOp_jne_Jv)
4891{
4892 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4893 IEMOP_HLP_MIN_386();
4894 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4895 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4896 {
4897 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4899
4900 IEM_MC_BEGIN(0, 0);
4901 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4902 IEM_MC_ADVANCE_RIP();
4903 } IEM_MC_ELSE() {
4904 IEM_MC_REL_JMP_S16(i16Imm);
4905 } IEM_MC_ENDIF();
4906 IEM_MC_END();
4907 }
4908 else
4909 {
4910 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4912
4913 IEM_MC_BEGIN(0, 0);
4914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4915 IEM_MC_ADVANCE_RIP();
4916 } IEM_MC_ELSE() {
4917 IEM_MC_REL_JMP_S32(i32Imm);
4918 } IEM_MC_ENDIF();
4919 IEM_MC_END();
4920 }
4921 return VINF_SUCCESS;
4922}
4923
4924
4925/** Opcode 0x0f 0x86. */
4926FNIEMOP_DEF(iemOp_jbe_Jv)
4927{
4928 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4929 IEMOP_HLP_MIN_386();
4930 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4931 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4932 {
4933 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4935
4936 IEM_MC_BEGIN(0, 0);
4937 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4938 IEM_MC_REL_JMP_S16(i16Imm);
4939 } IEM_MC_ELSE() {
4940 IEM_MC_ADVANCE_RIP();
4941 } IEM_MC_ENDIF();
4942 IEM_MC_END();
4943 }
4944 else
4945 {
4946 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4948
4949 IEM_MC_BEGIN(0, 0);
4950 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4951 IEM_MC_REL_JMP_S32(i32Imm);
4952 } IEM_MC_ELSE() {
4953 IEM_MC_ADVANCE_RIP();
4954 } IEM_MC_ENDIF();
4955 IEM_MC_END();
4956 }
4957 return VINF_SUCCESS;
4958}
4959
4960
4961/** Opcode 0x0f 0x87. */
4962FNIEMOP_DEF(iemOp_jnbe_Jv)
4963{
4964 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4965 IEMOP_HLP_MIN_386();
4966 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4967 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4968 {
4969 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4971
4972 IEM_MC_BEGIN(0, 0);
4973 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4974 IEM_MC_ADVANCE_RIP();
4975 } IEM_MC_ELSE() {
4976 IEM_MC_REL_JMP_S16(i16Imm);
4977 } IEM_MC_ENDIF();
4978 IEM_MC_END();
4979 }
4980 else
4981 {
4982 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4984
4985 IEM_MC_BEGIN(0, 0);
4986 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4987 IEM_MC_ADVANCE_RIP();
4988 } IEM_MC_ELSE() {
4989 IEM_MC_REL_JMP_S32(i32Imm);
4990 } IEM_MC_ENDIF();
4991 IEM_MC_END();
4992 }
4993 return VINF_SUCCESS;
4994}
4995
4996
4997/** Opcode 0x0f 0x88. */
4998FNIEMOP_DEF(iemOp_js_Jv)
4999{
5000 IEMOP_MNEMONIC(js_Jv, "js Jv");
5001 IEMOP_HLP_MIN_386();
5002 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5003 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5004 {
5005 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5007
5008 IEM_MC_BEGIN(0, 0);
5009 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5010 IEM_MC_REL_JMP_S16(i16Imm);
5011 } IEM_MC_ELSE() {
5012 IEM_MC_ADVANCE_RIP();
5013 } IEM_MC_ENDIF();
5014 IEM_MC_END();
5015 }
5016 else
5017 {
5018 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5020
5021 IEM_MC_BEGIN(0, 0);
5022 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5023 IEM_MC_REL_JMP_S32(i32Imm);
5024 } IEM_MC_ELSE() {
5025 IEM_MC_ADVANCE_RIP();
5026 } IEM_MC_ENDIF();
5027 IEM_MC_END();
5028 }
5029 return VINF_SUCCESS;
5030}
5031
5032
5033/** Opcode 0x0f 0x89. */
5034FNIEMOP_DEF(iemOp_jns_Jv)
5035{
5036 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5037 IEMOP_HLP_MIN_386();
5038 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5039 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5040 {
5041 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5043
5044 IEM_MC_BEGIN(0, 0);
5045 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5046 IEM_MC_ADVANCE_RIP();
5047 } IEM_MC_ELSE() {
5048 IEM_MC_REL_JMP_S16(i16Imm);
5049 } IEM_MC_ENDIF();
5050 IEM_MC_END();
5051 }
5052 else
5053 {
5054 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5056
5057 IEM_MC_BEGIN(0, 0);
5058 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5059 IEM_MC_ADVANCE_RIP();
5060 } IEM_MC_ELSE() {
5061 IEM_MC_REL_JMP_S32(i32Imm);
5062 } IEM_MC_ENDIF();
5063 IEM_MC_END();
5064 }
5065 return VINF_SUCCESS;
5066}
5067
5068
5069/** Opcode 0x0f 0x8a. */
5070FNIEMOP_DEF(iemOp_jp_Jv)
5071{
5072 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5073 IEMOP_HLP_MIN_386();
5074 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5075 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5076 {
5077 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5079
5080 IEM_MC_BEGIN(0, 0);
5081 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5082 IEM_MC_REL_JMP_S16(i16Imm);
5083 } IEM_MC_ELSE() {
5084 IEM_MC_ADVANCE_RIP();
5085 } IEM_MC_ENDIF();
5086 IEM_MC_END();
5087 }
5088 else
5089 {
5090 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5092
5093 IEM_MC_BEGIN(0, 0);
5094 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5095 IEM_MC_REL_JMP_S32(i32Imm);
5096 } IEM_MC_ELSE() {
5097 IEM_MC_ADVANCE_RIP();
5098 } IEM_MC_ENDIF();
5099 IEM_MC_END();
5100 }
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/** Opcode 0x0f 0x8b. */
5106FNIEMOP_DEF(iemOp_jnp_Jv)
5107{
5108 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5109 IEMOP_HLP_MIN_386();
5110 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5111 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5112 {
5113 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5115
5116 IEM_MC_BEGIN(0, 0);
5117 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5118 IEM_MC_ADVANCE_RIP();
5119 } IEM_MC_ELSE() {
5120 IEM_MC_REL_JMP_S16(i16Imm);
5121 } IEM_MC_ENDIF();
5122 IEM_MC_END();
5123 }
5124 else
5125 {
5126 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5128
5129 IEM_MC_BEGIN(0, 0);
5130 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5131 IEM_MC_ADVANCE_RIP();
5132 } IEM_MC_ELSE() {
5133 IEM_MC_REL_JMP_S32(i32Imm);
5134 } IEM_MC_ENDIF();
5135 IEM_MC_END();
5136 }
5137 return VINF_SUCCESS;
5138}
5139
5140
5141/** Opcode 0x0f 0x8c. */
5142FNIEMOP_DEF(iemOp_jl_Jv)
5143{
5144 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5145 IEMOP_HLP_MIN_386();
5146 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5147 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5148 {
5149 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5151
5152 IEM_MC_BEGIN(0, 0);
5153 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5154 IEM_MC_REL_JMP_S16(i16Imm);
5155 } IEM_MC_ELSE() {
5156 IEM_MC_ADVANCE_RIP();
5157 } IEM_MC_ENDIF();
5158 IEM_MC_END();
5159 }
5160 else
5161 {
5162 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5164
5165 IEM_MC_BEGIN(0, 0);
5166 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5167 IEM_MC_REL_JMP_S32(i32Imm);
5168 } IEM_MC_ELSE() {
5169 IEM_MC_ADVANCE_RIP();
5170 } IEM_MC_ENDIF();
5171 IEM_MC_END();
5172 }
5173 return VINF_SUCCESS;
5174}
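
/*
 * The 'less' condition tested by IEM_MC_IF_EFL_BITS_NE above is SF != OF,
 * i.e. in plain C (sketch):
 *
 * @code
 *  static bool isLessSketch(uint32_t fEFlags)
 *  {
 *      return RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF);
 *  }
 * @endcode
 */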
5175
5176
5177/** Opcode 0x0f 0x8d. */
5178FNIEMOP_DEF(iemOp_jnl_Jv)
5179{
5180 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5181 IEMOP_HLP_MIN_386();
5182 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5183 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5184 {
5185 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 IEM_MC_BEGIN(0, 0);
5189 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5190 IEM_MC_ADVANCE_RIP();
5191 } IEM_MC_ELSE() {
5192 IEM_MC_REL_JMP_S16(i16Imm);
5193 } IEM_MC_ENDIF();
5194 IEM_MC_END();
5195 }
5196 else
5197 {
5198 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5200
5201 IEM_MC_BEGIN(0, 0);
5202 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5203 IEM_MC_ADVANCE_RIP();
5204 } IEM_MC_ELSE() {
5205 IEM_MC_REL_JMP_S32(i32Imm);
5206 } IEM_MC_ENDIF();
5207 IEM_MC_END();
5208 }
5209 return VINF_SUCCESS;
5210}
5211
5212
5213/** Opcode 0x0f 0x8e. */
5214FNIEMOP_DEF(iemOp_jle_Jv)
5215{
5216 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5217 IEMOP_HLP_MIN_386();
5218 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5219 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5220 {
5221 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5223
5224 IEM_MC_BEGIN(0, 0);
5225 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5226 IEM_MC_REL_JMP_S16(i16Imm);
5227 } IEM_MC_ELSE() {
5228 IEM_MC_ADVANCE_RIP();
5229 } IEM_MC_ENDIF();
5230 IEM_MC_END();
5231 }
5232 else
5233 {
5234 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5236
5237 IEM_MC_BEGIN(0, 0);
5238 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5239 IEM_MC_REL_JMP_S32(i32Imm);
5240 } IEM_MC_ELSE() {
5241 IEM_MC_ADVANCE_RIP();
5242 } IEM_MC_ENDIF();
5243 IEM_MC_END();
5244 }
5245 return VINF_SUCCESS;
5246}
5247
5248
5249/** Opcode 0x0f 0x8f. */
5250FNIEMOP_DEF(iemOp_jnle_Jv)
5251{
5252 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5253 IEMOP_HLP_MIN_386();
5254 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5255 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5256 {
5257 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259
5260 IEM_MC_BEGIN(0, 0);
5261 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5262 IEM_MC_ADVANCE_RIP();
5263 } IEM_MC_ELSE() {
5264 IEM_MC_REL_JMP_S16(i16Imm);
5265 } IEM_MC_ENDIF();
5266 IEM_MC_END();
5267 }
5268 else
5269 {
5270 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272
5273 IEM_MC_BEGIN(0, 0);
5274 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5275 IEM_MC_ADVANCE_RIP();
5276 } IEM_MC_ELSE() {
5277 IEM_MC_REL_JMP_S32(i32Imm);
5278 } IEM_MC_ENDIF();
5279 IEM_MC_END();
5280 }
5281 return VINF_SUCCESS;
5282}
5283
5284
5285/** Opcode 0x0f 0x90. */
5286FNIEMOP_DEF(iemOp_seto_Eb)
5287{
5288 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5289 IEMOP_HLP_MIN_386();
5290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5291
5292 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5293 * any way. AMD says it's "unused", whatever that means. We're
5294 * ignoring for now. */
5295 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5296 {
5297 /* register target */
5298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5299 IEM_MC_BEGIN(0, 0);
5300 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5301 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5302 } IEM_MC_ELSE() {
5303 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5304 } IEM_MC_ENDIF();
5305 IEM_MC_ADVANCE_RIP();
5306 IEM_MC_END();
5307 }
5308 else
5309 {
5310 /* memory target */
5311 IEM_MC_BEGIN(0, 1);
5312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5316 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5317 } IEM_MC_ELSE() {
5318 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5319 } IEM_MC_ENDIF();
5320 IEM_MC_ADVANCE_RIP();
5321 IEM_MC_END();
5322 }
5323 return VINF_SUCCESS;
5324}
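
/*
 * This and the following setcc forms all share one shape: evaluate the
 * condition and store a single byte, 1 or 0. In plain C (sketch for seto):
 *
 * @code
 *  static uint8_t setoSketch(uint32_t fEFlags)
 *  {
 *      return (fEFlags & X86_EFL_OF) ? 1 : 0;
 *  }
 * @endcode
 */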
5325
5326
5327/** Opcode 0x0f 0x91. */
5328FNIEMOP_DEF(iemOp_setno_Eb)
5329{
5330 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5331 IEMOP_HLP_MIN_386();
5332 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5333
5334 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5335 * any way. AMD says it's "unused", whatever that means. We're
5336 * ignoring for now. */
5337 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5338 {
5339 /* register target */
5340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5341 IEM_MC_BEGIN(0, 0);
5342 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5343 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5344 } IEM_MC_ELSE() {
5345 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5346 } IEM_MC_ENDIF();
5347 IEM_MC_ADVANCE_RIP();
5348 IEM_MC_END();
5349 }
5350 else
5351 {
5352 /* memory target */
5353 IEM_MC_BEGIN(0, 1);
5354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5357 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5358 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5359 } IEM_MC_ELSE() {
5360 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5361 } IEM_MC_ENDIF();
5362 IEM_MC_ADVANCE_RIP();
5363 IEM_MC_END();
5364 }
5365 return VINF_SUCCESS;
5366}
5367
5368
5369/** Opcode 0x0f 0x92. */
5370FNIEMOP_DEF(iemOp_setc_Eb)
5371{
5372 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5373 IEMOP_HLP_MIN_386();
5374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5375
5376 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5377 * any way. AMD says it's "unused", whatever that means. We're
5378 * ignoring for now. */
5379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5380 {
5381 /* register target */
5382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5383 IEM_MC_BEGIN(0, 0);
5384 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5385 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5386 } IEM_MC_ELSE() {
5387 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5388 } IEM_MC_ENDIF();
5389 IEM_MC_ADVANCE_RIP();
5390 IEM_MC_END();
5391 }
5392 else
5393 {
5394 /* memory target */
5395 IEM_MC_BEGIN(0, 1);
5396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5399 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5400 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5401 } IEM_MC_ELSE() {
5402 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5403 } IEM_MC_ENDIF();
5404 IEM_MC_ADVANCE_RIP();
5405 IEM_MC_END();
5406 }
5407 return VINF_SUCCESS;
5408}
5409
5410
5411/** Opcode 0x0f 0x93. */
5412FNIEMOP_DEF(iemOp_setnc_Eb)
5413{
5414 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5415 IEMOP_HLP_MIN_386();
5416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5417
5418 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5419 * any way. AMD says it's "unused", whatever that means. We're
5420 * ignoring for now. */
5421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5422 {
5423 /* register target */
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 IEM_MC_BEGIN(0, 0);
5426 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5427 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5428 } IEM_MC_ELSE() {
5429 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5430 } IEM_MC_ENDIF();
5431 IEM_MC_ADVANCE_RIP();
5432 IEM_MC_END();
5433 }
5434 else
5435 {
5436 /* memory target */
5437 IEM_MC_BEGIN(0, 1);
5438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5441 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5442 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5443 } IEM_MC_ELSE() {
5444 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5445 } IEM_MC_ENDIF();
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 }
5449 return VINF_SUCCESS;
5450}
5451
5452
5453/** Opcode 0x0f 0x94. */
5454FNIEMOP_DEF(iemOp_sete_Eb)
5455{
5456 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5457 IEMOP_HLP_MIN_386();
5458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5459
5460 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5461 * any way. AMD says it's "unused", whatever that means. We're
5462 * ignoring for now. */
5463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5464 {
5465 /* register target */
5466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5467 IEM_MC_BEGIN(0, 0);
5468 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5469 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5470 } IEM_MC_ELSE() {
5471 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5472 } IEM_MC_ENDIF();
5473 IEM_MC_ADVANCE_RIP();
5474 IEM_MC_END();
5475 }
5476 else
5477 {
5478 /* memory target */
5479 IEM_MC_BEGIN(0, 1);
5480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5484 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5485 } IEM_MC_ELSE() {
5486 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5487 } IEM_MC_ENDIF();
5488 IEM_MC_ADVANCE_RIP();
5489 IEM_MC_END();
5490 }
5491 return VINF_SUCCESS;
5492}
5493
5494
5495/** Opcode 0x0f 0x95. */
5496FNIEMOP_DEF(iemOp_setne_Eb)
5497{
5498 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5499 IEMOP_HLP_MIN_386();
5500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5501
5502 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5503 * any way. AMD says it's "unused", whatever that means. We're
5504 * ignoring for now. */
5505 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5506 {
5507 /* register target */
5508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5509 IEM_MC_BEGIN(0, 0);
5510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5511 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5512 } IEM_MC_ELSE() {
5513 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5514 } IEM_MC_ENDIF();
5515 IEM_MC_ADVANCE_RIP();
5516 IEM_MC_END();
5517 }
5518 else
5519 {
5520 /* memory target */
5521 IEM_MC_BEGIN(0, 1);
5522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5526 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5527 } IEM_MC_ELSE() {
5528 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5529 } IEM_MC_ENDIF();
5530 IEM_MC_ADVANCE_RIP();
5531 IEM_MC_END();
5532 }
5533 return VINF_SUCCESS;
5534}
5535
5536
5537/** Opcode 0x0f 0x96. */
5538FNIEMOP_DEF(iemOp_setbe_Eb)
5539{
5540 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5541 IEMOP_HLP_MIN_386();
5542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5543
5544 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5545 * any way. AMD says it's "unused", whatever that means. We're
5546 * ignoring for now. */
5547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5548 {
5549 /* register target */
5550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5551 IEM_MC_BEGIN(0, 0);
5552 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5553 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5554 } IEM_MC_ELSE() {
5555 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5556 } IEM_MC_ENDIF();
5557 IEM_MC_ADVANCE_RIP();
5558 IEM_MC_END();
5559 }
5560 else
5561 {
5562 /* memory target */
5563 IEM_MC_BEGIN(0, 1);
5564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5567 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5568 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5569 } IEM_MC_ELSE() {
5570 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5571 } IEM_MC_ENDIF();
5572 IEM_MC_ADVANCE_RIP();
5573 IEM_MC_END();
5574 }
5575 return VINF_SUCCESS;
5576}
5577
5578
5579/** Opcode 0x0f 0x97. */
5580FNIEMOP_DEF(iemOp_setnbe_Eb)
5581{
5582 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5583 IEMOP_HLP_MIN_386();
5584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5585
5586 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5587 * any way. AMD says it's "unused", whatever that means. We're
5588 * ignoring for now. */
5589 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5590 {
5591 /* register target */
5592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5593 IEM_MC_BEGIN(0, 0);
5594 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5595 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5596 } IEM_MC_ELSE() {
5597 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5598 } IEM_MC_ENDIF();
5599 IEM_MC_ADVANCE_RIP();
5600 IEM_MC_END();
5601 }
5602 else
5603 {
5604 /* memory target */
5605 IEM_MC_BEGIN(0, 1);
5606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5609 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5610 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5611 } IEM_MC_ELSE() {
5612 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5613 } IEM_MC_ENDIF();
5614 IEM_MC_ADVANCE_RIP();
5615 IEM_MC_END();
5616 }
5617 return VINF_SUCCESS;
5618}
5619
5620
5621/** Opcode 0x0f 0x98. */
5622FNIEMOP_DEF(iemOp_sets_Eb)
5623{
5624 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5625 IEMOP_HLP_MIN_386();
5626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5627
5628 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5629 * any way. AMD says it's "unused", whatever that means. We're
5630 * ignoring for now. */
5631 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5632 {
5633 /* register target */
5634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5635 IEM_MC_BEGIN(0, 0);
5636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5637 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5638 } IEM_MC_ELSE() {
5639 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5640 } IEM_MC_ENDIF();
5641 IEM_MC_ADVANCE_RIP();
5642 IEM_MC_END();
5643 }
5644 else
5645 {
5646 /* memory target */
5647 IEM_MC_BEGIN(0, 1);
5648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5652 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5653 } IEM_MC_ELSE() {
5654 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5655 } IEM_MC_ENDIF();
5656 IEM_MC_ADVANCE_RIP();
5657 IEM_MC_END();
5658 }
5659 return VINF_SUCCESS;
5660}
5661
5662
5663/** Opcode 0x0f 0x99. */
5664FNIEMOP_DEF(iemOp_setns_Eb)
5665{
5666 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5667 IEMOP_HLP_MIN_386();
5668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5669
5670 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5671 * any way. AMD says it's "unused", whatever that means. We're
5672 * ignoring for now. */
5673 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5674 {
5675 /* register target */
5676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5677 IEM_MC_BEGIN(0, 0);
5678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5679 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5680 } IEM_MC_ELSE() {
5681 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5682 } IEM_MC_ENDIF();
5683 IEM_MC_ADVANCE_RIP();
5684 IEM_MC_END();
5685 }
5686 else
5687 {
5688 /* memory target */
5689 IEM_MC_BEGIN(0, 1);
5690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5693 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5695 } IEM_MC_ELSE() {
5696 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5697 } IEM_MC_ENDIF();
5698 IEM_MC_ADVANCE_RIP();
5699 IEM_MC_END();
5700 }
5701 return VINF_SUCCESS;
5702}
5703
5704
5705/** Opcode 0x0f 0x9a. */
5706FNIEMOP_DEF(iemOp_setp_Eb)
5707{
5708 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5709 IEMOP_HLP_MIN_386();
5710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5711
5712 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5713 * any way. AMD says it's "unused", whatever that means. We're
5714 * ignoring for now. */
5715 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5716 {
5717 /* register target */
5718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5719 IEM_MC_BEGIN(0, 0);
5720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5721 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5722 } IEM_MC_ELSE() {
5723 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5724 } IEM_MC_ENDIF();
5725 IEM_MC_ADVANCE_RIP();
5726 IEM_MC_END();
5727 }
5728 else
5729 {
5730 /* memory target */
5731 IEM_MC_BEGIN(0, 1);
5732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5736 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5737 } IEM_MC_ELSE() {
5738 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5739 } IEM_MC_ENDIF();
5740 IEM_MC_ADVANCE_RIP();
5741 IEM_MC_END();
5742 }
5743 return VINF_SUCCESS;
5744}
5745
5746
5747/** Opcode 0x0f 0x9b. */
5748FNIEMOP_DEF(iemOp_setnp_Eb)
5749{
5750 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5751 IEMOP_HLP_MIN_386();
5752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5753
5754 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5755 * any way. AMD says it's "unused", whatever that means. We're
5756 * ignoring for now. */
5757 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5758 {
5759 /* register target */
5760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5761 IEM_MC_BEGIN(0, 0);
5762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5763 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5764 } IEM_MC_ELSE() {
5765 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5766 } IEM_MC_ENDIF();
5767 IEM_MC_ADVANCE_RIP();
5768 IEM_MC_END();
5769 }
5770 else
5771 {
5772 /* memory target */
5773 IEM_MC_BEGIN(0, 1);
5774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5777 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5778 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5779 } IEM_MC_ELSE() {
5780 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5781 } IEM_MC_ENDIF();
5782 IEM_MC_ADVANCE_RIP();
5783 IEM_MC_END();
5784 }
5785 return VINF_SUCCESS;
5786}
5787
5788
5789/** Opcode 0x0f 0x9c. */
5790FNIEMOP_DEF(iemOp_setl_Eb)
5791{
5792 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5793 IEMOP_HLP_MIN_386();
5794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5795
5796 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5797 * any way. AMD says it's "unused", whatever that means. We're
5798 * ignoring for now. */
5799 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5800 {
5801 /* register target */
5802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5803 IEM_MC_BEGIN(0, 0);
5804 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5805 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5806 } IEM_MC_ELSE() {
5807 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5808 } IEM_MC_ENDIF();
5809 IEM_MC_ADVANCE_RIP();
5810 IEM_MC_END();
5811 }
5812 else
5813 {
5814 /* memory target */
5815 IEM_MC_BEGIN(0, 1);
5816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5819 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5820 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5821 } IEM_MC_ELSE() {
5822 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5823 } IEM_MC_ENDIF();
5824 IEM_MC_ADVANCE_RIP();
5825 IEM_MC_END();
5826 }
5827 return VINF_SUCCESS;
5828}
5829
5830
5831/** Opcode 0x0f 0x9d. */
5832FNIEMOP_DEF(iemOp_setnl_Eb)
5833{
5834 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5835 IEMOP_HLP_MIN_386();
5836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5837
5838 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5839 * any way. AMD says it's "unused", whatever that means. We're
5840 * ignoring for now. */
5841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5842 {
5843 /* register target */
5844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5845 IEM_MC_BEGIN(0, 0);
5846 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5847 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5848 } IEM_MC_ELSE() {
5849 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5850 } IEM_MC_ENDIF();
5851 IEM_MC_ADVANCE_RIP();
5852 IEM_MC_END();
5853 }
5854 else
5855 {
5856 /* memory target */
5857 IEM_MC_BEGIN(0, 1);
5858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5861 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5862 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5863 } IEM_MC_ELSE() {
5864 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5865 } IEM_MC_ENDIF();
5866 IEM_MC_ADVANCE_RIP();
5867 IEM_MC_END();
5868 }
5869 return VINF_SUCCESS;
5870}
5871
5872
5873/** Opcode 0x0f 0x9e. */
5874FNIEMOP_DEF(iemOp_setle_Eb)
5875{
5876 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5877 IEMOP_HLP_MIN_386();
5878 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5879
5880 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5881 * any way. AMD says it's "unused", whatever that means. We're
5882 * ignoring for now. */
5883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5884 {
5885 /* register target */
5886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5887 IEM_MC_BEGIN(0, 0);
5888 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5889 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5890 } IEM_MC_ELSE() {
5891 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5892 } IEM_MC_ENDIF();
5893 IEM_MC_ADVANCE_RIP();
5894 IEM_MC_END();
5895 }
5896 else
5897 {
5898 /* memory target */
5899 IEM_MC_BEGIN(0, 1);
5900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5903 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5904 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5905 } IEM_MC_ELSE() {
5906 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5907 } IEM_MC_ENDIF();
5908 IEM_MC_ADVANCE_RIP();
5909 IEM_MC_END();
5910 }
5911 return VINF_SUCCESS;
5912}
5913
5914
5915/** Opcode 0x0f 0x9f. */
5916FNIEMOP_DEF(iemOp_setnle_Eb)
5917{
5918 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5919 IEMOP_HLP_MIN_386();
5920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5921
5922 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5923 * any way. AMD says it's "unused", whatever that means. We're
5924 * ignoring for now. */
5925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5926 {
5927 /* register target */
5928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5929 IEM_MC_BEGIN(0, 0);
5930 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5931 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5932 } IEM_MC_ELSE() {
5933 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5934 } IEM_MC_ENDIF();
5935 IEM_MC_ADVANCE_RIP();
5936 IEM_MC_END();
5937 }
5938 else
5939 {
5940 /* memory target */
5941 IEM_MC_BEGIN(0, 1);
5942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5945 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5946 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5947 } IEM_MC_ELSE() {
5948 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5949 } IEM_MC_ENDIF();
5950 IEM_MC_ADVANCE_RIP();
5951 IEM_MC_END();
5952 }
5953 return VINF_SUCCESS;
5954}
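
/*
 * Orientation note (editorial): opcodes 0x0f 0x90..0x9f map the standard Jcc
 * condition nibble onto byte stores.  For instance 0x9c (setl) tests SF != OF
 * and 0x9e (setle) tests ZF || SF != OF, while each odd opcode is simply the
 * negation of the even opcode before it, which is why the handlers above come
 * in pairs that differ only in the 0/1 stores being swapped.
 */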
5955
5956
5957/**
5958 * Common 'push segment-register' helper.
5959 */
5960FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5961{
5962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5963 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS are pushable in 64-bit mode. */
5964 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5965
5966 switch (pVCpu->iem.s.enmEffOpSize)
5967 {
5968 case IEMMODE_16BIT:
5969 IEM_MC_BEGIN(0, 1);
5970 IEM_MC_LOCAL(uint16_t, u16Value);
5971 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5972 IEM_MC_PUSH_U16(u16Value);
5973 IEM_MC_ADVANCE_RIP();
5974 IEM_MC_END();
5975 break;
5976
5977 case IEMMODE_32BIT:
5978 IEM_MC_BEGIN(0, 1);
5979 IEM_MC_LOCAL(uint32_t, u32Value);
5980 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5981 IEM_MC_PUSH_U32_SREG(u32Value);
5982 IEM_MC_ADVANCE_RIP();
5983 IEM_MC_END();
5984 break;
5985
5986 case IEMMODE_64BIT:
5987 IEM_MC_BEGIN(0, 1);
5988 IEM_MC_LOCAL(uint64_t, u64Value);
5989 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5990 IEM_MC_PUSH_U64(u64Value);
5991 IEM_MC_ADVANCE_RIP();
5992 IEM_MC_END();
5993 break;
5994 }
5995
5996 return VINF_SUCCESS;
5997}
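
/*
 * Illustrative sketch (editorial, not upstream code): the operand-size switch
 * above zero-extends the 16-bit selector to the width of the stack slot.  The
 * 32-bit case goes through the dedicated IEM_MC_PUSH_U32_SREG op because real
 * CPUs may write only the low 16 bits of the 32-bit slot for segment pushes.
 * A hypothetical flat-memory model of the 64-bit path:
 *
 *      void PushSRegModel64(uint64_t *puRsp, uint64_t *pauStack, uint16_t uSel)
 *      {
 *          *puRsp -= 8;                      // one full 64-bit stack slot
 *          pauStack[*puRsp / 8] = uSel;      // selector is zero-extended
 *      }
 *
 * PushSRegModel64 and its parameters are made-up names for illustration.
 */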
5998
5999
6000/** Opcode 0x0f 0xa0. */
6001FNIEMOP_DEF(iemOp_push_fs)
6002{
6003 IEMOP_MNEMONIC(push_fs, "push fs");
6004 IEMOP_HLP_MIN_386();
6005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6006 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6007}
6008
6009
6010/** Opcode 0x0f 0xa1. */
6011FNIEMOP_DEF(iemOp_pop_fs)
6012{
6013 IEMOP_MNEMONIC(pop_fs, "pop fs");
6014 IEMOP_HLP_MIN_386();
6015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6016 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6017}
6018
6019
6020/** Opcode 0x0f 0xa2. */
6021FNIEMOP_DEF(iemOp_cpuid)
6022{
6023 IEMOP_MNEMONIC(cpuid, "cpuid");
6024 IEMOP_HLP_MIN_486(); /* Not quite accurate: not all 486s have CPUID. */
6025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6026 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6027}
6028
6029
6030/**
6031 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6032 * iemOp_bts_Ev_Gv.
6033 */
6034FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6035{
6036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6037 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6038
6039 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6040 {
6041 /* register destination. */
6042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6043 switch (pVCpu->iem.s.enmEffOpSize)
6044 {
6045 case IEMMODE_16BIT:
6046 IEM_MC_BEGIN(3, 0);
6047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6048 IEM_MC_ARG(uint16_t, u16Src, 1);
6049 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6050
6051 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6052 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6053 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6054 IEM_MC_REF_EFLAGS(pEFlags);
6055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6056
6057 IEM_MC_ADVANCE_RIP();
6058 IEM_MC_END();
6059 return VINF_SUCCESS;
6060
6061 case IEMMODE_32BIT:
6062 IEM_MC_BEGIN(3, 0);
6063 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6064 IEM_MC_ARG(uint32_t, u32Src, 1);
6065 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6066
6067 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6068 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6069 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6070 IEM_MC_REF_EFLAGS(pEFlags);
6071 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6072
6073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6074 IEM_MC_ADVANCE_RIP();
6075 IEM_MC_END();
6076 return VINF_SUCCESS;
6077
6078 case IEMMODE_64BIT:
6079 IEM_MC_BEGIN(3, 0);
6080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6081 IEM_MC_ARG(uint64_t, u64Src, 1);
6082 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6083
6084 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6085 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6087 IEM_MC_REF_EFLAGS(pEFlags);
6088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6089
6090 IEM_MC_ADVANCE_RIP();
6091 IEM_MC_END();
6092 return VINF_SUCCESS;
6093
6094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6095 }
6096 }
6097 else
6098 {
6099 /* memory destination. */
6100
6101 uint32_t fAccess;
6102 if (pImpl->pfnLockedU16)
6103 fAccess = IEM_ACCESS_DATA_RW;
6104 else /* BT */
6105 fAccess = IEM_ACCESS_DATA_R;
6106
6107 /** @todo test negative bit offsets! */
6108 switch (pVCpu->iem.s.enmEffOpSize)
6109 {
6110 case IEMMODE_16BIT:
6111 IEM_MC_BEGIN(3, 2);
6112 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6113 IEM_MC_ARG(uint16_t, u16Src, 1);
6114 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6116 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6117
6118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6119 if (pImpl->pfnLockedU16)
6120 IEMOP_HLP_DONE_DECODING();
6121 else
6122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6123 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6124 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6125 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6126 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6127 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6128 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6129 IEM_MC_FETCH_EFLAGS(EFlags);
6130
6131 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6132 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6133 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6134 else
6135 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6136 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6137
6138 IEM_MC_COMMIT_EFLAGS(EFlags);
6139 IEM_MC_ADVANCE_RIP();
6140 IEM_MC_END();
6141 return VINF_SUCCESS;
6142
6143 case IEMMODE_32BIT:
6144 IEM_MC_BEGIN(3, 2);
6145 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6146 IEM_MC_ARG(uint32_t, u32Src, 1);
6147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6149 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6150
6151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6152 if (pImpl->pfnLockedU16)
6153 IEMOP_HLP_DONE_DECODING();
6154 else
6155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6156 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6157 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6158 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6159 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6160 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6161 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6162 IEM_MC_FETCH_EFLAGS(EFlags);
6163
6164 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6165 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6166 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6167 else
6168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6169 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6170
6171 IEM_MC_COMMIT_EFLAGS(EFlags);
6172 IEM_MC_ADVANCE_RIP();
6173 IEM_MC_END();
6174 return VINF_SUCCESS;
6175
6176 case IEMMODE_64BIT:
6177 IEM_MC_BEGIN(3, 2);
6178 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6179 IEM_MC_ARG(uint64_t, u64Src, 1);
6180 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6182 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6183
6184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6185 if (pImpl->pfnLockedU16)
6186 IEMOP_HLP_DONE_DECODING();
6187 else
6188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6189 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6190 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6191 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6192 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6193 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6194 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6195 IEM_MC_FETCH_EFLAGS(EFlags);
6196
6197 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6198 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6200 else
6201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6203
6204 IEM_MC_COMMIT_EFLAGS(EFlags);
6205 IEM_MC_ADVANCE_RIP();
6206 IEM_MC_END();
6207 return VINF_SUCCESS;
6208
6209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6210 }
6211 }
6212}
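
/*
 * Worked example (editorial): for the memory forms above the bit offset in
 * the source register is signed and may address bits outside the unit at the
 * effective address, so the address is displaced first.  For bt word [mem],35
 * with the 16-bit path:
 *
 *      int16_t  i16AddrAdj = 35;
 *      uint16_t u16Bit     = 35 & 0x0f;  // -> bit 3 within the final word
 *      i16AddrAdj >>= 4;                 // SAR: -> 2 words past the base
 *      i16AddrAdj <<= 1;                 // -> +4 bytes
 *      // the word at [mem + 4] is mapped and bit 3 of it is tested
 *
 * The arithmetic shift keeps negative offsets working (offset -1 adjusts the
 * address by -2 bytes and tests bit 15 there), which is what the @todo above
 * wants covered by tests.  Note that pfnLockedU16 doubles as the 'has locked
 * variants' sentinel for all operand sizes, since BT alone lacks them.
 */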
6213
6214
6215/** Opcode 0x0f 0xa3. */
6216FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6217{
6218 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6219 IEMOP_HLP_MIN_386();
6220 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6221}
6222
6223
6224/**
6225 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6226 */
6227FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6228{
6229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6230 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6231
6232 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6233 {
6234 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6236
6237 switch (pVCpu->iem.s.enmEffOpSize)
6238 {
6239 case IEMMODE_16BIT:
6240 IEM_MC_BEGIN(4, 0);
6241 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6242 IEM_MC_ARG(uint16_t, u16Src, 1);
6243 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6244 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6245
6246 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6247 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6248 IEM_MC_REF_EFLAGS(pEFlags);
6249 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6250
6251 IEM_MC_ADVANCE_RIP();
6252 IEM_MC_END();
6253 return VINF_SUCCESS;
6254
6255 case IEMMODE_32BIT:
6256 IEM_MC_BEGIN(4, 0);
6257 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6258 IEM_MC_ARG(uint32_t, u32Src, 1);
6259 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6260 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6261
6262 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6263 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6264 IEM_MC_REF_EFLAGS(pEFlags);
6265 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6266
6267 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6268 IEM_MC_ADVANCE_RIP();
6269 IEM_MC_END();
6270 return VINF_SUCCESS;
6271
6272 case IEMMODE_64BIT:
6273 IEM_MC_BEGIN(4, 0);
6274 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6275 IEM_MC_ARG(uint64_t, u64Src, 1);
6276 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6277 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6278
6279 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6280 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6281 IEM_MC_REF_EFLAGS(pEFlags);
6282 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6283
6284 IEM_MC_ADVANCE_RIP();
6285 IEM_MC_END();
6286 return VINF_SUCCESS;
6287
6288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6289 }
6290 }
6291 else
6292 {
6293 switch (pVCpu->iem.s.enmEffOpSize)
6294 {
6295 case IEMMODE_16BIT:
6296 IEM_MC_BEGIN(4, 2);
6297 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6298 IEM_MC_ARG(uint16_t, u16Src, 1);
6299 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6300 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6302
6303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6304 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6305 IEM_MC_ASSIGN(cShiftArg, cShift);
6306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6307 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6308 IEM_MC_FETCH_EFLAGS(EFlags);
6309 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6310 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6311
6312 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6313 IEM_MC_COMMIT_EFLAGS(EFlags);
6314 IEM_MC_ADVANCE_RIP();
6315 IEM_MC_END();
6316 return VINF_SUCCESS;
6317
6318 case IEMMODE_32BIT:
6319 IEM_MC_BEGIN(4, 2);
6320 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6321 IEM_MC_ARG(uint32_t, u32Src, 1);
6322 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6323 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6325
6326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6327 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6328 IEM_MC_ASSIGN(cShiftArg, cShift);
6329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6330 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6331 IEM_MC_FETCH_EFLAGS(EFlags);
6332 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6333 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6334
6335 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6336 IEM_MC_COMMIT_EFLAGS(EFlags);
6337 IEM_MC_ADVANCE_RIP();
6338 IEM_MC_END();
6339 return VINF_SUCCESS;
6340
6341 case IEMMODE_64BIT:
6342 IEM_MC_BEGIN(4, 2);
6343 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6344 IEM_MC_ARG(uint64_t, u64Src, 1);
6345 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6346 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6348
6349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6350 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6351 IEM_MC_ASSIGN(cShiftArg, cShift);
6352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6353 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6354 IEM_MC_FETCH_EFLAGS(EFlags);
6355 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6356 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6357
6358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6359 IEM_MC_COMMIT_EFLAGS(EFlags);
6360 IEM_MC_ADVANCE_RIP();
6361 IEM_MC_END();
6362 return VINF_SUCCESS;
6363
6364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6365 }
6366 }
6367}
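
/*
 * Semantics refresher (editorial, not upstream code): SHLD shifts the
 * destination left and fills the vacated low bits from the source; SHRD is
 * the mirror image.  A plain C model of the 16-bit SHLD case handled above,
 * assuming the count was already masked to 0..15 (wider counts leave 16-bit
 * results undefined on real CPUs):
 *
 *      uint16_t ShldU16Model(uint16_t uDst, uint16_t uSrc, uint8_t cShift)
 *      {
 *          if (!cShift)
 *              return uDst;
 *          return (uint16_t)((uDst << cShift) | (uSrc >> (16 - cShift)));
 *      }
 *
 * ShldU16Model is a hypothetical name; the real flag-updating work lives in
 * the pfnNormalU* assembly workers, with AF/OF declared undefined above.
 */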
6368
6369
6370/**
6371 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6372 */
6373FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6374{
6375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6376 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6377
6378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6379 {
6380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6381
6382 switch (pVCpu->iem.s.enmEffOpSize)
6383 {
6384 case IEMMODE_16BIT:
6385 IEM_MC_BEGIN(4, 0);
6386 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6387 IEM_MC_ARG(uint16_t, u16Src, 1);
6388 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6389 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6390
6391 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6392 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6393 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6394 IEM_MC_REF_EFLAGS(pEFlags);
6395 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6396
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(4, 0);
6403 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6404 IEM_MC_ARG(uint32_t, u32Src, 1);
6405 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6406 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6407
6408 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6409 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6410 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6411 IEM_MC_REF_EFLAGS(pEFlags);
6412 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6413
6414 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6415 IEM_MC_ADVANCE_RIP();
6416 IEM_MC_END();
6417 return VINF_SUCCESS;
6418
6419 case IEMMODE_64BIT:
6420 IEM_MC_BEGIN(4, 0);
6421 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6422 IEM_MC_ARG(uint64_t, u64Src, 1);
6423 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6424 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6425
6426 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6427 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6428 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6429 IEM_MC_REF_EFLAGS(pEFlags);
6430 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6431
6432 IEM_MC_ADVANCE_RIP();
6433 IEM_MC_END();
6434 return VINF_SUCCESS;
6435
6436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6437 }
6438 }
6439 else
6440 {
6441 switch (pVCpu->iem.s.enmEffOpSize)
6442 {
6443 case IEMMODE_16BIT:
6444 IEM_MC_BEGIN(4, 2);
6445 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6446 IEM_MC_ARG(uint16_t, u16Src, 1);
6447 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6448 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6450
6451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6453 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6454 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6455 IEM_MC_FETCH_EFLAGS(EFlags);
6456 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6457 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6458
6459 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6460 IEM_MC_COMMIT_EFLAGS(EFlags);
6461 IEM_MC_ADVANCE_RIP();
6462 IEM_MC_END();
6463 return VINF_SUCCESS;
6464
6465 case IEMMODE_32BIT:
6466 IEM_MC_BEGIN(4, 2);
6467 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6468 IEM_MC_ARG(uint32_t, u32Src, 1);
6469 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6470 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6472
6473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6475 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6476 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6477 IEM_MC_FETCH_EFLAGS(EFlags);
6478 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6479 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6480
6481 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6482 IEM_MC_COMMIT_EFLAGS(EFlags);
6483 IEM_MC_ADVANCE_RIP();
6484 IEM_MC_END();
6485 return VINF_SUCCESS;
6486
6487 case IEMMODE_64BIT:
6488 IEM_MC_BEGIN(4, 2);
6489 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6490 IEM_MC_ARG(uint64_t, u64Src, 1);
6491 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6492 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6494
6495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6497 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6498 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6499 IEM_MC_FETCH_EFLAGS(EFlags);
6500 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6501 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6502
6503 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6504 IEM_MC_COMMIT_EFLAGS(EFlags);
6505 IEM_MC_ADVANCE_RIP();
6506 IEM_MC_END();
6507 return VINF_SUCCESS;
6508
6509 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6510 }
6511 }
6512}
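
/*
 * Editorial note: this worker matches the Ib variant above except that the
 * shift count is fetched from CL at execution time instead of being decoded
 * as an immediate, which is why cShiftArg is a plain IEM_MC_ARG filled via
 * IEM_MC_FETCH_GREG_U8 rather than an IEM_MC_ARG_CONST.
 */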
6513
6514
6515
6516/** Opcode 0x0f 0xa4. */
6517FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6518{
6519 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6520 IEMOP_HLP_MIN_386();
6521 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6522}
6523
6524
6525/** Opcode 0x0f 0xa5. */
6526FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6527{
6528 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6529 IEMOP_HLP_MIN_386();
6530 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6531}
6532
6533
6534/** Opcode 0x0f 0xa8. */
6535FNIEMOP_DEF(iemOp_push_gs)
6536{
6537 IEMOP_MNEMONIC(push_gs, "push gs");
6538 IEMOP_HLP_MIN_386();
6539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6540 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6541}
6542
6543
6544/** Opcode 0x0f 0xa9. */
6545FNIEMOP_DEF(iemOp_pop_gs)
6546{
6547 IEMOP_MNEMONIC(pop_gs, "pop gs");
6548 IEMOP_HLP_MIN_386();
6549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6550 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6551}
6552
6553
6554/** Opcode 0x0f 0xaa. */
6555FNIEMOP_DEF(iemOp_rsm)
6556{
6557 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6558 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6560 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6561}
6562
6563
6564
6565/** Opcode 0x0f 0xab. */
6566FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6567{
6568 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6569 IEMOP_HLP_MIN_386();
6570 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6571}
6572
6573
6574/** Opcode 0x0f 0xac. */
6575FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6576{
6577 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6578 IEMOP_HLP_MIN_386();
6579 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6580}
6581
6582
6583/** Opcode 0x0f 0xad. */
6584FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6585{
6586 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6587 IEMOP_HLP_MIN_386();
6588 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6589}
6590
6591
6592/** Opcode 0x0f 0xae mem/0. */
6593FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6594{
6595 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6596 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6597 return IEMOP_RAISE_INVALID_OPCODE();
6598
6599 IEM_MC_BEGIN(3, 1);
6600 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6601 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6602 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6605 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6606 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6607 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6608 IEM_MC_END();
6609 return VINF_SUCCESS;
6610}
6611
6612
6613/** Opcode 0x0f 0xae mem/1. */
6614FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6615{
6616 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6617 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6618 return IEMOP_RAISE_INVALID_OPCODE();
6619
6620 IEM_MC_BEGIN(3, 1);
6621 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6622 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6623 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6626 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6627 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6628 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6629 IEM_MC_END();
6630 return VINF_SUCCESS;
6631}
6632
6633
6634/**
6635 * @opmaps grp15
6636 * @opcode !11/2
6637 * @oppfx none
6638 * @opcpuid sse
6639 * @opgroup og_sse_mxcsrsm
6640 * @opxcpttype 5
6641 * @optest op1=0 -> mxcsr=0
6642 * @optest op1=0x2083 -> mxcsr=0x2083
6643 * @optest op1=0xfffffffe -> value.xcpt=0xd
6644 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6645 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6646 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6647 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6648 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6649 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6650 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6651 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6652 */
6653FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6654{
6655 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6656 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6657 return IEMOP_RAISE_INVALID_OPCODE();
6658
6659 IEM_MC_BEGIN(2, 0);
6660 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6661 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR, so mark the SSE state for change (cf. stmxcsr's FOR_READ). */
6665 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6666 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6667 IEM_MC_END();
6668 return VINF_SUCCESS;
6669}
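
/*
 * Editorial note: the @optest lines in the doxygen blocks here feed the IEM
 * testcase generator.  Read 'op1=0x2083 cr0|=ts -> value.xcpt=0x7' as: with
 * CR0.TS set, ldmxcsr of the value 0x2083 must raise #NM (vector 7) rather
 * than load MXCSR.
 */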
6670
6671
6672/**
6673 * @opmaps grp15
6674 * @opcode !11/3
6675 * @oppfx none
6676 * @opcpuid sse
6677 * @opgroup og_sse_mxcsrsm
6678 * @opxcpttype 5
6679 * @optest mxcsr=0 -> op1=0
6680 * @optest mxcsr=0x2083 -> op1=0x2083
6681 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6682 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6683 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6684 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6685 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6686 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6687 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6688 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6689 */
6690FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6691{
6692 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6693 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6694 return IEMOP_RAISE_INVALID_OPCODE();
6695
6696 IEM_MC_BEGIN(2, 0);
6697 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6698 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6701 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6702 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6703 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6704 IEM_MC_END();
6705 return VINF_SUCCESS;
6706}
6707
6708
6709/**
6710 * @opmaps grp15
6711 * @opcode !11/4
6712 * @oppfx none
6713 * @opcpuid xsave
6714 * @opgroup og_system
6715 * @opxcpttype none
6716 */
6717FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6718{
6719 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6720 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6721 return IEMOP_RAISE_INVALID_OPCODE();
6722
6723 IEM_MC_BEGIN(3, 0);
6724 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6725 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6726 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6730 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6731 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6732 IEM_MC_END();
6733 return VINF_SUCCESS;
6734}
6735
6736
6737/**
6738 * @opmaps grp15
6739 * @opcode !11/5
6740 * @oppfx none
6741 * @opcpuid xsave
6742 * @opgroup og_system
6743 * @opxcpttype none
6744 */
6745FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6746{
6747 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6748 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6749 return IEMOP_RAISE_INVALID_OPCODE();
6750
6751 IEM_MC_BEGIN(3, 0);
6752 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6753 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6754 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6757 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Like fxrstor above: the restored state must be marked for change, not just read. */
6758 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6759 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6760 IEM_MC_END();
6761 return VINF_SUCCESS;
6762}
6763
6764/** Opcode 0x0f 0xae mem/6. */
6765FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6766
6767/**
6768 * @opmaps grp15
6769 * @opcode !11/7
6770 * @oppfx none
6771 * @opcpuid clfsh
6772 * @opgroup og_cachectl
6773 * @optest op1=1 ->
6774 */
6775FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6776{
6777 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6778 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6779 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6780
6781 IEM_MC_BEGIN(2, 0);
6782 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6783 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6786 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6787 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6788 IEM_MC_END();
6789 return VINF_SUCCESS;
6790}
6791
6792/**
6793 * @opmaps grp15
6794 * @opcode !11/7
6795 * @oppfx 0x66
6796 * @opcpuid clflushopt
6797 * @opgroup og_cachectl
6798 * @optest op1=1 ->
6799 */
6800FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6801{
6802 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6803 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6804 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6805
6806 IEM_MC_BEGIN(2, 0);
6807 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6808 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6811 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6812 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6813 IEM_MC_END();
6814 return VINF_SUCCESS;
6815}
6816
6817
6818/** Opcode 0x0f 0xae 11b/5. */
6819FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6820{
6821 RT_NOREF_PV(bRm);
6822 IEMOP_MNEMONIC(lfence, "lfence");
6823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6824 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6825 return IEMOP_RAISE_INVALID_OPCODE();
6826
6827 IEM_MC_BEGIN(0, 0);
6828 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6829 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6830 else
6831 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6832 IEM_MC_ADVANCE_RIP();
6833 IEM_MC_END();
6834 return VINF_SUCCESS;
6835}
6836
6837
6838/** Opcode 0x0f 0xae 11b/6. */
6839FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6840{
6841 RT_NOREF_PV(bRm);
6842 IEMOP_MNEMONIC(mfence, "mfence");
6843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6844 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6845 return IEMOP_RAISE_INVALID_OPCODE();
6846
6847 IEM_MC_BEGIN(0, 0);
6848 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6849 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6850 else
6851 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6852 IEM_MC_ADVANCE_RIP();
6853 IEM_MC_END();
6854 return VINF_SUCCESS;
6855}
6856
6857
6858/** Opcode 0x0f 0xae 11b/7. */
6859FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6860{
6861 RT_NOREF_PV(bRm);
6862 IEMOP_MNEMONIC(sfence, "sfence");
6863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6864 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6865 return IEMOP_RAISE_INVALID_OPCODE();
6866
6867 IEM_MC_BEGIN(0, 0);
6868 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6869 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6870 else
6871 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6872 IEM_MC_ADVANCE_RIP();
6873 IEM_MC_END();
6874 return VINF_SUCCESS;
6875}
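
/*
 * Editorial note on the three fence handlers above: the guest-visible effect
 * is memory ordering only, so when the host has SSE2 the matching host fence
 * instruction is executed directly, and otherwise iemAImpl_alt_mem_fence
 * supplies a substitute (a locked memory access, which orders at least as
 * strongly as any of the three).
 */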
6876
6877
6878/** Opcode 0xf3 0x0f 0xae 11b/0. */
6879FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6880{
6881 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6883 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6884 {
6885 IEM_MC_BEGIN(1, 0);
6886 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6887 IEM_MC_ARG(uint64_t, u64Dst, 0);
6888 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6889 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6890 IEM_MC_ADVANCE_RIP();
6891 IEM_MC_END();
6892 }
6893 else
6894 {
6895 IEM_MC_BEGIN(1, 0);
6896 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6897 IEM_MC_ARG(uint32_t, u32Dst, 0);
6898 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6899 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6900 IEM_MC_ADVANCE_RIP();
6901 IEM_MC_END();
6902 }
6903 return VINF_SUCCESS;
6904}
6905
6906/** Opcode 0xf3 0x0f 0xae 11b/1. */
6907FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6908{
6909 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
6910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6911 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6912 {
6913 IEM_MC_BEGIN(1, 0);
6914 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6915 IEM_MC_ARG(uint64_t, u64Dst, 0);
6916 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
6917 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6918 IEM_MC_ADVANCE_RIP();
6919 IEM_MC_END();
6920 }
6921 else
6922 {
6923 IEM_MC_BEGIN(1, 0);
6924 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6925 IEM_MC_ARG(uint32_t, u32Dst, 0);
6926 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
6927 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6928 IEM_MC_ADVANCE_RIP();
6929 IEM_MC_END();
6930 }
6931 return VINF_SUCCESS;
6932}
6933
6934/** Opcode 0xf3 0x0f 0xae 11b/2. */
6935FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
6936{
6937 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
6938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6939 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6940 {
6941 IEM_MC_BEGIN(1, 0);
6942 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6943 IEM_MC_ARG(uint64_t, u64Dst, 0);
6944 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6945 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6946 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
6947 IEM_MC_ADVANCE_RIP();
6948 IEM_MC_END();
6949 }
6950 else
6951 {
6952 IEM_MC_BEGIN(1, 0);
6953 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6954 IEM_MC_ARG(uint32_t, u32Dst, 0);
6955 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6956 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
6957 IEM_MC_ADVANCE_RIP();
6958 IEM_MC_END();
6959 }
6960 return VINF_SUCCESS;
6961}
6962
6963/** Opcode 0xf3 0x0f 0xae 11b/3. */
6964FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
6965{
6966 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
6967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6968 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6969 {
6970 IEM_MC_BEGIN(1, 0);
6971 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6972 IEM_MC_ARG(uint64_t, u64Dst, 0);
6973 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6974 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
6975 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
6976 IEM_MC_ADVANCE_RIP();
6977 IEM_MC_END();
6978 }
6979 else
6980 {
6981 IEM_MC_BEGIN(1, 0);
6982 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6983 IEM_MC_ARG(uint32_t, u32Dst, 0);
6984 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6985 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
6986 IEM_MC_ADVANCE_RIP();
6987 IEM_MC_END();
6988 }
6989 return VINF_SUCCESS;
6990}
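
/*
 * Illustrative check (editorial, not upstream code): the 64-bit forms above
 * must raise #GP(0) for a non-canonical base, which is what
 * IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 enforces.  With 48 implemented
 * address bits that test amounts to:
 *
 *      bool IsCanonicalModel(uint64_t uAddr)
 *      {
 *          return (uint64_t)((int64_t)(uAddr << 16) >> 16) == uAddr;
 *      }
 *
 * so 0x00007fffffffffff and 0xffff800000000000 pass while
 * 0x0000800000000000 faults.  IsCanonicalModel is a made-up name; the real
 * helper lives in iprt/x86.h as X86_IS_CANONICAL.
 */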
6991
6992
6993/**
6994 * Group 15 jump table for register variant.
6995 */
6996IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6997{ /* pfx: none, 066h, 0f3h, 0f2h */
6998 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6999 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7000 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7001 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7002 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7003 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7004 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7005 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7006};
7007AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7008
7009
7010/**
7011 * Group 15 jump table for memory variant.
7012 */
7013IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7014{ /* pfx: none, 066h, 0f3h, 0f2h */
7015 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7016 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7017 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7018 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7019 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7020 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7021 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7022 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7023};
7024AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7025
7026
7027/** Opcode 0x0f 0xae. */
7028FNIEMOP_DEF(iemOp_Grp15)
7029{
7030 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7032 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7033 /* register, register */
7034 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7035 + pVCpu->iem.s.idxPrefix], bRm);
7036 /* memory, register */
7037 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7038 + pVCpu->iem.s.idxPrefix], bRm);
7039}
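
/*
 * Worked dispatch example (editorial): both tables are indexed by
 * reg * 4 + idxPrefix, where idxPrefix encodes none/0x66/0xf3/0xf2 as 0..3.
 * For 'f3 0f ae c0' the ModR/M byte is 0xc0 (mod=3, reg=0), giving
 * g_apfnGroup15RegReg[0*4 + 2] -> iemOp_Grp15_rdfsbase; plain '0f ae e8'
 * (mod=3, reg=5) gives entry 5*4 + 0 -> iemOp_Grp15_lfence.
 */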
7040
7041
7042/** Opcode 0x0f 0xaf. */
7043FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7044{
7045 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7046 IEMOP_HLP_MIN_386();
7047 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7048 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7049}
7050
7051
7052/** Opcode 0x0f 0xb0. */
7053FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7054{
7055 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7056 IEMOP_HLP_MIN_486();
7057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7058
7059 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7060 {
7061 IEMOP_HLP_DONE_DECODING();
7062 IEM_MC_BEGIN(4, 0);
7063 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7064 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7065 IEM_MC_ARG(uint8_t, u8Src, 2);
7066 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7067
7068 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7069 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7070 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7071 IEM_MC_REF_EFLAGS(pEFlags);
7072 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7073 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7074 else
7075 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7076
7077 IEM_MC_ADVANCE_RIP();
7078 IEM_MC_END();
7079 }
7080 else
7081 {
7082 IEM_MC_BEGIN(4, 3);
7083 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7084 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7085 IEM_MC_ARG(uint8_t, u8Src, 2);
7086 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7088 IEM_MC_LOCAL(uint8_t, u8Al);
7089
7090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7091 IEMOP_HLP_DONE_DECODING();
7092 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7093 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7094 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7095 IEM_MC_FETCH_EFLAGS(EFlags);
7096 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7097 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7098 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7099 else
7100 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7101
7102 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7103 IEM_MC_COMMIT_EFLAGS(EFlags);
7104 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7105 IEM_MC_ADVANCE_RIP();
7106 IEM_MC_END();
7107 }
7108 return VINF_SUCCESS;
7109}
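
/*
 * Semantics model (editorial, not upstream code) for the CMPXCHG paths above
 * and below:
 *
 *      void CmpXchgU8Model(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src,
 *                          uint32_t *pfEFlags)
 *      {
 *          if (*pu8Al == *pu8Dst)
 *          {
 *              *pfEFlags |= X86_EFL_ZF;  // plus the other CMP flag results
 *              *pu8Dst    = u8Src;       // equal: store the source
 *          }
 *          else
 *          {
 *              *pfEFlags &= ~X86_EFL_ZF;
 *              *pu8Al     = *pu8Dst;     // not equal: load the accumulator
 *          }
 *      }
 *
 * CmpXchgU8Model is a made-up name.  This is why the memory form keeps the
 * accumulator in a local and stores it back to AL/AX/EAX unconditionally
 * after the call: on success the store simply rewrites the old value.
 */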
7110
7111/** Opcode 0x0f 0xb1. */
7112FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7113{
7114 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7115 IEMOP_HLP_MIN_486();
7116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7117
7118 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7119 {
7120 IEMOP_HLP_DONE_DECODING();
7121 switch (pVCpu->iem.s.enmEffOpSize)
7122 {
7123 case IEMMODE_16BIT:
7124 IEM_MC_BEGIN(4, 0);
7125 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7126 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7127 IEM_MC_ARG(uint16_t, u16Src, 2);
7128 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7129
7130 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7131 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7132 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7133 IEM_MC_REF_EFLAGS(pEFlags);
7134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7136 else
7137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7138
7139 IEM_MC_ADVANCE_RIP();
7140 IEM_MC_END();
7141 return VINF_SUCCESS;
7142
7143 case IEMMODE_32BIT:
7144 IEM_MC_BEGIN(4, 0);
7145 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7146 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7147 IEM_MC_ARG(uint32_t, u32Src, 2);
7148 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7149
7150 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7151 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7152 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7153 IEM_MC_REF_EFLAGS(pEFlags);
7154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7155 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7156 else
7157 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7158
7159 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7160 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7161 IEM_MC_ADVANCE_RIP();
7162 IEM_MC_END();
7163 return VINF_SUCCESS;
7164
7165 case IEMMODE_64BIT:
7166 IEM_MC_BEGIN(4, 0);
7167 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7168 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7169#ifdef RT_ARCH_X86
7170 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7171#else
7172 IEM_MC_ARG(uint64_t, u64Src, 2);
7173#endif
7174 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7175
7176 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7177 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7178 IEM_MC_REF_EFLAGS(pEFlags);
7179#ifdef RT_ARCH_X86
7180 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7182 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7183 else
7184 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7185#else
7186 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7187 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7188 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7189 else
7190 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7191#endif
7192
7193 IEM_MC_ADVANCE_RIP();
7194 IEM_MC_END();
7195 return VINF_SUCCESS;
7196
7197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7198 }
7199 }
7200 else
7201 {
7202 switch (pVCpu->iem.s.enmEffOpSize)
7203 {
7204 case IEMMODE_16BIT:
7205 IEM_MC_BEGIN(4, 3);
7206 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7207 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7208 IEM_MC_ARG(uint16_t, u16Src, 2);
7209 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7211 IEM_MC_LOCAL(uint16_t, u16Ax);
7212
7213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7214 IEMOP_HLP_DONE_DECODING();
7215 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7216 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7217 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7218 IEM_MC_FETCH_EFLAGS(EFlags);
7219 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7220 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7221 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7222 else
7223 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7224
7225 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7226 IEM_MC_COMMIT_EFLAGS(EFlags);
7227 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7228 IEM_MC_ADVANCE_RIP();
7229 IEM_MC_END();
7230 return VINF_SUCCESS;
7231
7232 case IEMMODE_32BIT:
7233 IEM_MC_BEGIN(4, 3);
7234 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7235 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7236 IEM_MC_ARG(uint32_t, u32Src, 2);
7237 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7239 IEM_MC_LOCAL(uint32_t, u32Eax);
7240
7241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7242 IEMOP_HLP_DONE_DECODING();
7243 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7244 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7245 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7246 IEM_MC_FETCH_EFLAGS(EFlags);
7247 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7248 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7250 else
7251 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7252
7253 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7254 IEM_MC_COMMIT_EFLAGS(EFlags);
7255 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7256 IEM_MC_ADVANCE_RIP();
7257 IEM_MC_END();
7258 return VINF_SUCCESS;
7259
7260 case IEMMODE_64BIT:
7261 IEM_MC_BEGIN(4, 3);
7262 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7263 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7264#ifdef RT_ARCH_X86
7265 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7266#else
7267 IEM_MC_ARG(uint64_t, u64Src, 2);
7268#endif
7269 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7271 IEM_MC_LOCAL(uint64_t, u64Rax);
7272
7273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7274 IEMOP_HLP_DONE_DECODING();
7275 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7276 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7277 IEM_MC_FETCH_EFLAGS(EFlags);
7278 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7279#ifdef RT_ARCH_X86
7280 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7281 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7282 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7283 else
7284 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7285#else
7286 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7287 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7288 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7289 else
7290 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7291#endif
7292
7293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7294 IEM_MC_COMMIT_EFLAGS(EFlags);
7295 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7296 IEM_MC_ADVANCE_RIP();
7297 IEM_MC_END();
7298 return VINF_SUCCESS;
7299
7300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7301 }
7302 }
7303}
7304
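/*
 * For reference, a minimal C sketch of the CMPXCHG semantics the
 * iemAImpl_cmpxchg_u* workers implement (illustrative helper name; the real
 * workers also update CF, OF, SF, AF and PF the way a CMP would, which this
 * sketch omits):
 *
 * @code
 * static void iemExampleCmpXchgU16(uint16_t *puDst, uint16_t *puAx, uint16_t uSrc, uint32_t *pfEFlags)
 * {
 *     if (*puDst == *puAx)
 *     {
 *         *pfEFlags |= X86_EFL_ZF;    // equal: ZF=1 and the destination receives the source
 *         *puDst = uSrc;
 *     }
 *     else
 *     {
 *         *pfEFlags &= ~X86_EFL_ZF;   // not equal: ZF=0 and the accumulator receives the old value
 *         *puAx = *puDst;
 *     }
 * }
 * @endcode
 */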
7305
7306FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7307{
7308 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7309 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7310
7311 switch (pVCpu->iem.s.enmEffOpSize)
7312 {
7313 case IEMMODE_16BIT:
7314 IEM_MC_BEGIN(5, 1);
7315 IEM_MC_ARG(uint16_t, uSel, 0);
7316 IEM_MC_ARG(uint16_t, offSeg, 1);
7317 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7318 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7319 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7320 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7323 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7324 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7325 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7326 IEM_MC_END();
7327 return VINF_SUCCESS;
7328
7329 case IEMMODE_32BIT:
7330 IEM_MC_BEGIN(5, 1);
7331 IEM_MC_ARG(uint16_t, uSel, 0);
7332 IEM_MC_ARG(uint32_t, offSeg, 1);
7333 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7334 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7335 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7336 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7339 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7340 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7341 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7342 IEM_MC_END();
7343 return VINF_SUCCESS;
7344
7345 case IEMMODE_64BIT:
7346 IEM_MC_BEGIN(5, 1);
7347 IEM_MC_ARG(uint16_t, uSel, 0);
7348 IEM_MC_ARG(uint64_t, offSeg, 1);
7349 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7350 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7351 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7352 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7355 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7356 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7357 else
7358 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7359 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7360 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7361 IEM_MC_END();
7362 return VINF_SUCCESS;
7363
7364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7365 }
7366}
7367
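/*
 * The far pointer operand fetched above is laid out offset-first,
 * selector-last in memory. A sketch of the 32-bit operand size case
 * (hypothetical type, assuming a packed layout):
 *
 * @code
 * #pragma pack(1)
 * typedef struct EXAMPLEFARPTR1632
 * {
 *     uint32_t off;   // fetched first, at GCPtrEff+0
 *     uint16_t sel;   // fetched second, at GCPtrEff+4
 * } EXAMPLEFARPTR1632;
 * #pragma pack()
 * @endcode
 */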
7368
7369/** Opcode 0x0f 0xb2. */
7370FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7371{
7372 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7373 IEMOP_HLP_MIN_386();
7374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7376 return IEMOP_RAISE_INVALID_OPCODE();
7377 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7378}
7379
7380
7381/** Opcode 0x0f 0xb3. */
7382FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7383{
7384 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7385 IEMOP_HLP_MIN_386();
7386 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7387}
7388
7389
7390/** Opcode 0x0f 0xb4. */
7391FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7392{
7393 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7394 IEMOP_HLP_MIN_386();
7395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7397 return IEMOP_RAISE_INVALID_OPCODE();
7398 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7399}
7400
7401
7402/** Opcode 0x0f 0xb5. */
7403FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7404{
7405 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7406 IEMOP_HLP_MIN_386();
7407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7408 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7409 return IEMOP_RAISE_INVALID_OPCODE();
7410 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7411}
7412
7413
7414/** Opcode 0x0f 0xb6. */
7415FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7416{
7417 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7418 IEMOP_HLP_MIN_386();
7419
7420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7421
7422 /*
7423 * If rm is denoting a register, no more instruction bytes.
7424 */
7425 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7426 {
7427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7428 switch (pVCpu->iem.s.enmEffOpSize)
7429 {
7430 case IEMMODE_16BIT:
7431 IEM_MC_BEGIN(0, 1);
7432 IEM_MC_LOCAL(uint16_t, u16Value);
7433 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7434 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7435 IEM_MC_ADVANCE_RIP();
7436 IEM_MC_END();
7437 return VINF_SUCCESS;
7438
7439 case IEMMODE_32BIT:
7440 IEM_MC_BEGIN(0, 1);
7441 IEM_MC_LOCAL(uint32_t, u32Value);
7442 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7443 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7444 IEM_MC_ADVANCE_RIP();
7445 IEM_MC_END();
7446 return VINF_SUCCESS;
7447
7448 case IEMMODE_64BIT:
7449 IEM_MC_BEGIN(0, 1);
7450 IEM_MC_LOCAL(uint64_t, u64Value);
7451 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7452 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7453 IEM_MC_ADVANCE_RIP();
7454 IEM_MC_END();
7455 return VINF_SUCCESS;
7456
7457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7458 }
7459 }
7460 else
7461 {
7462 /*
7463 * We're loading a register from memory.
7464 */
7465 switch (pVCpu->iem.s.enmEffOpSize)
7466 {
7467 case IEMMODE_16BIT:
7468 IEM_MC_BEGIN(0, 2);
7469 IEM_MC_LOCAL(uint16_t, u16Value);
7470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7473 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7474 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7475 IEM_MC_ADVANCE_RIP();
7476 IEM_MC_END();
7477 return VINF_SUCCESS;
7478
7479 case IEMMODE_32BIT:
7480 IEM_MC_BEGIN(0, 2);
7481 IEM_MC_LOCAL(uint32_t, u32Value);
7482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7485 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7486 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7487 IEM_MC_ADVANCE_RIP();
7488 IEM_MC_END();
7489 return VINF_SUCCESS;
7490
7491 case IEMMODE_64BIT:
7492 IEM_MC_BEGIN(0, 2);
7493 IEM_MC_LOCAL(uint64_t, u64Value);
7494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7497 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7498 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7499 IEM_MC_ADVANCE_RIP();
7500 IEM_MC_END();
7501 return VINF_SUCCESS;
7502
7503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7504 }
7505 }
7506}
7507
7508
7509/** Opcode 0x0f 0xb7. */
7510FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7511{
7512 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7513 IEMOP_HLP_MIN_386();
7514
7515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7516
7517 /** @todo Not entirely sure how the operand size prefix is handled here,
7518 * assuming that it will be ignored. Would be nice to have a few
7519 * tests for this. */
7520 /*
7521 * If rm is denoting a register, no more instruction bytes.
7522 */
7523 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7524 {
7525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7526 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7527 {
7528 IEM_MC_BEGIN(0, 1);
7529 IEM_MC_LOCAL(uint32_t, u32Value);
7530 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7531 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7532 IEM_MC_ADVANCE_RIP();
7533 IEM_MC_END();
7534 }
7535 else
7536 {
7537 IEM_MC_BEGIN(0, 1);
7538 IEM_MC_LOCAL(uint64_t, u64Value);
7539 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7540 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7541 IEM_MC_ADVANCE_RIP();
7542 IEM_MC_END();
7543 }
7544 }
7545 else
7546 {
7547 /*
7548 * We're loading a register from memory.
7549 */
7550 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7551 {
7552 IEM_MC_BEGIN(0, 2);
7553 IEM_MC_LOCAL(uint32_t, u32Value);
7554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7557 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7558 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7559 IEM_MC_ADVANCE_RIP();
7560 IEM_MC_END();
7561 }
7562 else
7563 {
7564 IEM_MC_BEGIN(0, 2);
7565 IEM_MC_LOCAL(uint64_t, u64Value);
7566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7569 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7570 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7571 IEM_MC_ADVANCE_RIP();
7572 IEM_MC_END();
7573 }
7574 }
7575 return VINF_SUCCESS;
7576}
7577
7578
7579/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7580FNIEMOP_UD_STUB(iemOp_jmpe);
7581/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7582FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7583
7584
7585/**
7586 * @opcode 0xb9
7587 * @opinvalid intel-modrm
7588 * @optest ->
7589 */
7590FNIEMOP_DEF(iemOp_Grp10)
7591{
7592 /*
7593 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7594 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So we can forward to iemOp_InvalidNeedRM.
7595 */
7596 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7597 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7598 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7599}
7600
7601
7602/** Opcode 0x0f 0xba. */
7603FNIEMOP_DEF(iemOp_Grp8)
7604{
7605 IEMOP_HLP_MIN_386();
7606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7607 PCIEMOPBINSIZES pImpl;
7608 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7609 {
7610 case 0: case 1: case 2: case 3:
7611 /* Both AMD and Intel want full modr/m decoding and imm8. */
7612 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7613 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7614 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7615 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7616 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7618 }
7619 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7620
7621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7622 {
7623 /* register destination. */
7624 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7626
7627 switch (pVCpu->iem.s.enmEffOpSize)
7628 {
7629 case IEMMODE_16BIT:
7630 IEM_MC_BEGIN(3, 0);
7631 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7632 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7633 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7634
7635 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7636 IEM_MC_REF_EFLAGS(pEFlags);
7637 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7638
7639 IEM_MC_ADVANCE_RIP();
7640 IEM_MC_END();
7641 return VINF_SUCCESS;
7642
7643 case IEMMODE_32BIT:
7644 IEM_MC_BEGIN(3, 0);
7645 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7646 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7647 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7648
7649 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7650 IEM_MC_REF_EFLAGS(pEFlags);
7651 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7652
7653 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7654 IEM_MC_ADVANCE_RIP();
7655 IEM_MC_END();
7656 return VINF_SUCCESS;
7657
7658 case IEMMODE_64BIT:
7659 IEM_MC_BEGIN(3, 0);
7660 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7661 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7662 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7663
7664 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7665 IEM_MC_REF_EFLAGS(pEFlags);
7666 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7667
7668 IEM_MC_ADVANCE_RIP();
7669 IEM_MC_END();
7670 return VINF_SUCCESS;
7671
7672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7673 }
7674 }
7675 else
7676 {
7677 /* memory destination. */
7678
7679 uint32_t fAccess;
7680 if (pImpl->pfnLockedU16)
7681 fAccess = IEM_ACCESS_DATA_RW;
7682 else /* BT */
7683 fAccess = IEM_ACCESS_DATA_R;
7684
7685 /** @todo test negative bit offsets! */
7686 switch (pVCpu->iem.s.enmEffOpSize)
7687 {
7688 case IEMMODE_16BIT:
7689 IEM_MC_BEGIN(3, 1);
7690 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7691 IEM_MC_ARG(uint16_t, u16Src, 1);
7692 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7694
7695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7696 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7697 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7698 if (pImpl->pfnLockedU16)
7699 IEMOP_HLP_DONE_DECODING();
7700 else
7701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7702 IEM_MC_FETCH_EFLAGS(EFlags);
7703 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7704 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7705 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7706 else
7707 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7708 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7709
7710 IEM_MC_COMMIT_EFLAGS(EFlags);
7711 IEM_MC_ADVANCE_RIP();
7712 IEM_MC_END();
7713 return VINF_SUCCESS;
7714
7715 case IEMMODE_32BIT:
7716 IEM_MC_BEGIN(3, 1);
7717 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7718 IEM_MC_ARG(uint32_t, u32Src, 1);
7719 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7721
7722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7723 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7724 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7725 if (pImpl->pfnLockedU16)
7726 IEMOP_HLP_DONE_DECODING();
7727 else
7728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7729 IEM_MC_FETCH_EFLAGS(EFlags);
7730 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7731 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7732 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7733 else
7734 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7735 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7736
7737 IEM_MC_COMMIT_EFLAGS(EFlags);
7738 IEM_MC_ADVANCE_RIP();
7739 IEM_MC_END();
7740 return VINF_SUCCESS;
7741
7742 case IEMMODE_64BIT:
7743 IEM_MC_BEGIN(3, 1);
7744 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7745 IEM_MC_ARG(uint64_t, u64Src, 1);
7746 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7748
7749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7750 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7751 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7752 if (pImpl->pfnLockedU16)
7753 IEMOP_HLP_DONE_DECODING();
7754 else
7755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7756 IEM_MC_FETCH_EFLAGS(EFlags);
7757 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7758 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7759 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7760 else
7761 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7762 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7763
7764 IEM_MC_COMMIT_EFLAGS(EFlags);
7765 IEM_MC_ADVANCE_RIP();
7766 IEM_MC_END();
7767 return VINF_SUCCESS;
7768
7769 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7770 }
7771 }
7772}
7773
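/*
 * Note how the imm8 bit offset above is reduced modulo the operand width
 * (u8Bit & 0x0f/0x1f/0x3f), unlike the Ev,Gv bit forms where a memory
 * operand is addressed relative to the full bit offset. A sketch of the
 * reduction (hypothetical helper):
 *
 * @code
 * static unsigned iemExampleGrp8BitIndex(uint8_t u8Bit, IEMMODE enmOpSize)
 * {
 *     switch (enmOpSize)
 *     {
 *         case IEMMODE_16BIT: return u8Bit & 0x0f;
 *         case IEMMODE_32BIT: return u8Bit & 0x1f;
 *         default:            return u8Bit & 0x3f;  // IEMMODE_64BIT
 *     }
 * }
 * @endcode
 */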
7774
7775/** Opcode 0x0f 0xbb. */
7776FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7777{
7778 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7779 IEMOP_HLP_MIN_386();
7780 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7781}
7782
7783
7784/** Opcode 0x0f 0xbc. */
7785FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7786{
7787 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7788 IEMOP_HLP_MIN_386();
7789 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7790 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7791}
7792
7793
7794/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7795FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7796
7797
7798/** Opcode 0x0f 0xbd. */
7799FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7800{
7801 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7802 IEMOP_HLP_MIN_386();
7803 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7804 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7805}
7806
7807
7808/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7809FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7810
7811
7812/** Opcode 0x0f 0xbe. */
7813FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7814{
7815 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7816 IEMOP_HLP_MIN_386();
7817
7818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7819
7820 /*
7821 * If rm is denoting a register, no more instruction bytes.
7822 */
7823 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7824 {
7825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7826 switch (pVCpu->iem.s.enmEffOpSize)
7827 {
7828 case IEMMODE_16BIT:
7829 IEM_MC_BEGIN(0, 1);
7830 IEM_MC_LOCAL(uint16_t, u16Value);
7831 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7832 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7833 IEM_MC_ADVANCE_RIP();
7834 IEM_MC_END();
7835 return VINF_SUCCESS;
7836
7837 case IEMMODE_32BIT:
7838 IEM_MC_BEGIN(0, 1);
7839 IEM_MC_LOCAL(uint32_t, u32Value);
7840 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7841 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7842 IEM_MC_ADVANCE_RIP();
7843 IEM_MC_END();
7844 return VINF_SUCCESS;
7845
7846 case IEMMODE_64BIT:
7847 IEM_MC_BEGIN(0, 1);
7848 IEM_MC_LOCAL(uint64_t, u64Value);
7849 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7850 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7851 IEM_MC_ADVANCE_RIP();
7852 IEM_MC_END();
7853 return VINF_SUCCESS;
7854
7855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7856 }
7857 }
7858 else
7859 {
7860 /*
7861 * We're loading a register from memory.
7862 */
7863 switch (pVCpu->iem.s.enmEffOpSize)
7864 {
7865 case IEMMODE_16BIT:
7866 IEM_MC_BEGIN(0, 2);
7867 IEM_MC_LOCAL(uint16_t, u16Value);
7868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7871 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7872 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7873 IEM_MC_ADVANCE_RIP();
7874 IEM_MC_END();
7875 return VINF_SUCCESS;
7876
7877 case IEMMODE_32BIT:
7878 IEM_MC_BEGIN(0, 2);
7879 IEM_MC_LOCAL(uint32_t, u32Value);
7880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7883 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7884 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7885 IEM_MC_ADVANCE_RIP();
7886 IEM_MC_END();
7887 return VINF_SUCCESS;
7888
7889 case IEMMODE_64BIT:
7890 IEM_MC_BEGIN(0, 2);
7891 IEM_MC_LOCAL(uint64_t, u64Value);
7892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7895 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7896 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7897 IEM_MC_ADVANCE_RIP();
7898 IEM_MC_END();
7899 return VINF_SUCCESS;
7900
7901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7902 }
7903 }
7904}
7905
7906
7907/** Opcode 0x0f 0xbf. */
7908FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7909{
7910 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7911 IEMOP_HLP_MIN_386();
7912
7913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7914
7915 /** @todo Not entirely sure how the operand size prefix is handled here,
7916 * assuming that it will be ignored. Would be nice to have a few
7917 * tests for this. */
7918 /*
7919 * If rm is denoting a register, no more instruction bytes.
7920 */
7921 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7922 {
7923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7924 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7925 {
7926 IEM_MC_BEGIN(0, 1);
7927 IEM_MC_LOCAL(uint32_t, u32Value);
7928 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7929 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7930 IEM_MC_ADVANCE_RIP();
7931 IEM_MC_END();
7932 }
7933 else
7934 {
7935 IEM_MC_BEGIN(0, 1);
7936 IEM_MC_LOCAL(uint64_t, u64Value);
7937 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7938 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7939 IEM_MC_ADVANCE_RIP();
7940 IEM_MC_END();
7941 }
7942 }
7943 else
7944 {
7945 /*
7946 * We're loading a register from memory.
7947 */
7948 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7949 {
7950 IEM_MC_BEGIN(0, 2);
7951 IEM_MC_LOCAL(uint32_t, u32Value);
7952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7955 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7956 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7957 IEM_MC_ADVANCE_RIP();
7958 IEM_MC_END();
7959 }
7960 else
7961 {
7962 IEM_MC_BEGIN(0, 2);
7963 IEM_MC_LOCAL(uint64_t, u64Value);
7964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7967 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7968 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7969 IEM_MC_ADVANCE_RIP();
7970 IEM_MC_END();
7971 }
7972 }
7973 return VINF_SUCCESS;
7974}
7975
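/*
 * Plain C illustration of the widening performed by the movzx and movsx
 * paths above (two's complement, as on x86):
 *
 * @code
 * uint16_t const u16   = 0x8000;
 * uint32_t const uZx32 = u16;                             // movzx: 0x00008000
 * uint32_t const uSx32 = (uint32_t)(int32_t)(int16_t)u16; // movsx: 0xffff8000
 * @endcode
 */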
7976
7977/** Opcode 0x0f 0xc0. */
7978FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7979{
7980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7981 IEMOP_HLP_MIN_486();
7982 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7983
7984 /*
7985 * If rm is denoting a register, no more instruction bytes.
7986 */
7987 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7988 {
7989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7990
7991 IEM_MC_BEGIN(3, 0);
7992 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7993 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7994 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7995
7996 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7997 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7998 IEM_MC_REF_EFLAGS(pEFlags);
7999 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8000
8001 IEM_MC_ADVANCE_RIP();
8002 IEM_MC_END();
8003 }
8004 else
8005 {
8006 /*
8007 * We're accessing memory.
8008 */
8009 IEM_MC_BEGIN(3, 3);
8010 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8011 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8012 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8013 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8015
8016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8017 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8018 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8019 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8020 IEM_MC_FETCH_EFLAGS(EFlags);
8021 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8022 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8023 else
8024 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8025
8026 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8027 IEM_MC_COMMIT_EFLAGS(EFlags);
8028 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8029 IEM_MC_ADVANCE_RIP();
8030 IEM_MC_END();
8031 return VINF_SUCCESS;
8032 }
8033 return VINF_SUCCESS;
8034}
8035
8036
8037/** Opcode 0x0f 0xc1. */
8038FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8039{
8040 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8041 IEMOP_HLP_MIN_486();
8042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8043
8044 /*
8045 * If rm is denoting a register, no more instruction bytes.
8046 */
8047 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8048 {
8049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8050
8051 switch (pVCpu->iem.s.enmEffOpSize)
8052 {
8053 case IEMMODE_16BIT:
8054 IEM_MC_BEGIN(3, 0);
8055 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8056 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8057 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8058
8059 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8060 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8061 IEM_MC_REF_EFLAGS(pEFlags);
8062 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8063
8064 IEM_MC_ADVANCE_RIP();
8065 IEM_MC_END();
8066 return VINF_SUCCESS;
8067
8068 case IEMMODE_32BIT:
8069 IEM_MC_BEGIN(3, 0);
8070 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8071 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8072 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8073
8074 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8075 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8076 IEM_MC_REF_EFLAGS(pEFlags);
8077 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8078
8079 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8080 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8081 IEM_MC_ADVANCE_RIP();
8082 IEM_MC_END();
8083 return VINF_SUCCESS;
8084
8085 case IEMMODE_64BIT:
8086 IEM_MC_BEGIN(3, 0);
8087 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8088 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8090
8091 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8092 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8093 IEM_MC_REF_EFLAGS(pEFlags);
8094 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8095
8096 IEM_MC_ADVANCE_RIP();
8097 IEM_MC_END();
8098 return VINF_SUCCESS;
8099
8100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8101 }
8102 }
8103 else
8104 {
8105 /*
8106 * We're accessing memory.
8107 */
8108 switch (pVCpu->iem.s.enmEffOpSize)
8109 {
8110 case IEMMODE_16BIT:
8111 IEM_MC_BEGIN(3, 3);
8112 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8113 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8114 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8115 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8117
8118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8119 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8120 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8121 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8122 IEM_MC_FETCH_EFLAGS(EFlags);
8123 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8124 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8125 else
8126 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8127
8128 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8129 IEM_MC_COMMIT_EFLAGS(EFlags);
8130 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8131 IEM_MC_ADVANCE_RIP();
8132 IEM_MC_END();
8133 return VINF_SUCCESS;
8134
8135 case IEMMODE_32BIT:
8136 IEM_MC_BEGIN(3, 3);
8137 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8138 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8139 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8140 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8142
8143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8144 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8145 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8146 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8147 IEM_MC_FETCH_EFLAGS(EFlags);
8148 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8149 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8150 else
8151 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8152
8153 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8154 IEM_MC_COMMIT_EFLAGS(EFlags);
8155 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8156 IEM_MC_ADVANCE_RIP();
8157 IEM_MC_END();
8158 return VINF_SUCCESS;
8159
8160 case IEMMODE_64BIT:
8161 IEM_MC_BEGIN(3, 3);
8162 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8163 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8164 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8165 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8167
8168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8169 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8170 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8171 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8172 IEM_MC_FETCH_EFLAGS(EFlags);
8173 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8174 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8175 else
8176 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8177
8178 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8179 IEM_MC_COMMIT_EFLAGS(EFlags);
8180 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8181 IEM_MC_ADVANCE_RIP();
8182 IEM_MC_END();
8183 return VINF_SUCCESS;
8184
8185 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8186 }
8187 }
8188}
8189
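/*
 * For reference, the XADD semantics implemented by the iemAImpl_xadd_u*
 * workers as a minimal C sketch (illustrative helper; the flags are updated
 * as for ADD, which the sketch omits):
 *
 * @code
 * static void iemExampleXaddU32(uint32_t *puDst, uint32_t *puReg)
 * {
 *     uint32_t const uOldDst = *puDst;
 *     *puDst = uOldDst + *puReg;   // destination := destination + register
 *     *puReg = uOldDst;            // register := old destination value
 * }
 * @endcode
 */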
8190
8191/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8192FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8193/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8194FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8195/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8196FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8197/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8198FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8199
8200
8201/** Opcode 0x0f 0xc3. */
8202FNIEMOP_DEF(iemOp_movnti_My_Gy)
8203{
8204 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8205
8206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8207
8208 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8209 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8210 {
8211 switch (pVCpu->iem.s.enmEffOpSize)
8212 {
8213 case IEMMODE_32BIT:
8214 IEM_MC_BEGIN(0, 2);
8215 IEM_MC_LOCAL(uint32_t, u32Value);
8216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8217
8218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8220 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8221 return IEMOP_RAISE_INVALID_OPCODE();
8222
8223 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8224 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8225 IEM_MC_ADVANCE_RIP();
8226 IEM_MC_END();
8227 break;
8228
8229 case IEMMODE_64BIT:
8230 IEM_MC_BEGIN(0, 2);
8231 IEM_MC_LOCAL(uint64_t, u64Value);
8232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8233
8234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8236 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8237 return IEMOP_RAISE_INVALID_OPCODE();
8238
8239 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8240 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8241 IEM_MC_ADVANCE_RIP();
8242 IEM_MC_END();
8243 break;
8244
8245 case IEMMODE_16BIT:
8246 /** @todo check this form. */
8247 return IEMOP_RAISE_INVALID_OPCODE();
8248 }
8249 }
8250 else
8251 return IEMOP_RAISE_INVALID_OPCODE();
8252 return VINF_SUCCESS;
8253}
8254/* Opcode 0x66 0x0f 0xc3 - invalid */
8255/* Opcode 0xf3 0x0f 0xc3 - invalid */
8256/* Opcode 0xf2 0x0f 0xc3 - invalid */
8257
8258/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8259FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8260/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8261FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8262/* Opcode 0xf3 0x0f 0xc4 - invalid */
8263/* Opcode 0xf2 0x0f 0xc4 - invalid */
8264
8265/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8266FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8267/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8268FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8269/* Opcode 0xf3 0x0f 0xc5 - invalid */
8270/* Opcode 0xf2 0x0f 0xc5 - invalid */
8271
8272/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8273FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8274/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8275FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8276/* Opcode 0xf3 0x0f 0xc6 - invalid */
8277/* Opcode 0xf2 0x0f 0xc6 - invalid */
8278
8279
8280/** Opcode 0x0f 0xc7 !11/1. */
8281FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8282{
8283 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8284
8285 IEM_MC_BEGIN(4, 3);
8286 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8287 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8288 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8289 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8290 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8291 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8293
8294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8295 IEMOP_HLP_DONE_DECODING();
8296 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8297
8298 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8299 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8300 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8301
8302 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8303 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8304 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8305
8306 IEM_MC_FETCH_EFLAGS(EFlags);
8307 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8308 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8309 else
8310 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8311
8312 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8313 IEM_MC_COMMIT_EFLAGS(EFlags);
8314 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8315 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8316 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8317 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8318 IEM_MC_ENDIF();
8319 IEM_MC_ADVANCE_RIP();
8320
8321 IEM_MC_END();
8322 return VINF_SUCCESS;
8323}
8324
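/*
 * The CMPXCHG8B semantics as a minimal C sketch (illustrative helper;
 * EDX:EAX is compared against the memory operand and ECX:EBX is the
 * replacement value; the locked variant does the same thing atomically):
 *
 * @code
 * static void iemExampleCmpXchg8b(uint64_t *pu64Dst, RTUINT64U *pu64EaxEdx,
 *                                 RTUINT64U const *pu64EbxEcx, uint32_t *pfEFlags)
 * {
 *     if (*pu64Dst == pu64EaxEdx->u)
 *     {
 *         *pu64Dst = pu64EbxEcx->u;   // match: store ECX:EBX, set ZF
 *         *pfEFlags |= X86_EFL_ZF;
 *     }
 *     else
 *     {
 *         pu64EaxEdx->u = *pu64Dst;   // mismatch: load old value into EDX:EAX, clear ZF
 *         *pfEFlags &= ~X86_EFL_ZF;
 *     }
 * }
 * @endcode
 */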
8325
8326/** Opcode REX.W 0x0f 0xc7 !11/1. */
8327FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8328{
8329 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8330 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8331 {
8332#if 0
8333 RT_NOREF(bRm);
8334 IEMOP_BITCH_ABOUT_STUB();
8335 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8336#else
8337 IEM_MC_BEGIN(4, 3);
8338 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8339 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8340 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8341 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8342 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8343 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8345
8346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8347 IEMOP_HLP_DONE_DECODING();
8348 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8349 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8350
8351 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8352 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8353 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8354
8355 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8356 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8357 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8358
8359 IEM_MC_FETCH_EFLAGS(EFlags);
8360# ifdef RT_ARCH_AMD64
8361 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8362 {
8363 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8364 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8365 else
8366 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8367 }
8368 else
8369# endif
8370 {
8371 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8372 accesses and thus not atomic, which works fine in a uni-CPU guest
8373 configuration (ignoring DMA). If guest SMP is active we have no choice
8374 but to use a rendezvous callback here. Sigh. */
8375 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8376 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8377 else
8378 {
8379 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8380 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8381 }
8382 }
8383
8384 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8385 IEM_MC_COMMIT_EFLAGS(EFlags);
8386 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8387 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8388 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8389 IEM_MC_ENDIF();
8390 IEM_MC_ADVANCE_RIP();
8391
8392 IEM_MC_END();
8393 return VINF_SUCCESS;
8394#endif
8395 }
8396 Log(("cmpxchg16b -> #UD\n"));
8397 return IEMOP_RAISE_INVALID_OPCODE();
8398}
8399
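/*
 * A sketch of what the non-atomic fallback amounts to (illustrative helper;
 * the two halves are read and written separately, which is only safe when
 * no other vCPU or DMA can race the access - hence the rendezvous on SMP
 * guests):
 *
 * @code
 * static void iemExampleCmpXchg16bFallback(RTUINT128U *pu128Dst, RTUINT128U *pu128RaxRdx,
 *                                          RTUINT128U const *pu128RbxRcx, uint32_t *pfEFlags)
 * {
 *     if (   pu128Dst->s.Lo == pu128RaxRdx->s.Lo
 *         && pu128Dst->s.Hi == pu128RaxRdx->s.Hi)
 *     {
 *         pu128Dst->s.Lo = pu128RbxRcx->s.Lo;   // two separate stores - not atomic!
 *         pu128Dst->s.Hi = pu128RbxRcx->s.Hi;
 *         *pfEFlags |= X86_EFL_ZF;
 *     }
 *     else
 *     {
 *         pu128RaxRdx->s.Lo = pu128Dst->s.Lo;
 *         pu128RaxRdx->s.Hi = pu128Dst->s.Hi;
 *         *pfEFlags &= ~X86_EFL_ZF;
 *     }
 * }
 * @endcode
 */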
8400FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8401{
8402 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8403 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8404 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8405}
8406
8407/** Opcode 0x0f 0xc7 11/6. */
8408FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8409
8410/** Opcode 0x0f 0xc7 !11/6. */
8411FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8412
8413/** Opcode 0x66 0x0f 0xc7 !11/6. */
8414FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8415
8416/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8417FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8418
8419/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8420FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8421
8422/** Opcode 0x0f 0xc7 11/7. */
8423FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8424
8425
8426/**
8427 * Group 9 jump table for register variant.
8428 */
8429IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8430{ /* pfx: none, 066h, 0f3h, 0f2h */
8431 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8432 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8433 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8434 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8435 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8436 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8437 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8438 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8439};
8440AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8441
8442
8443/**
8444 * Group 9 jump table for memory variant.
8445 */
8446IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8447{ /* pfx: none, 066h, 0f3h, 0f2h */
8448 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8449 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8450 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8451 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8452 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8453 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8454 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8455 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8456};
8457AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8458
8459
8460/** Opcode 0x0f 0xc7. */
8461FNIEMOP_DEF(iemOp_Grp9)
8462{
8463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8465 /* register, register */
8466 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8467 + pVCpu->iem.s.idxPrefix], bRm);
8468 /* memory, register */
8469 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8470 + pVCpu->iem.s.idxPrefix], bRm);
8471}
8472
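/*
 * Both tables are indexed by the modr/m reg field times four plus the
 * prefix column (0=none, 1=66h, 2=f3h, 3=f2h):
 *
 * @code
 * unsigned const idx = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + pVCpu->iem.s.idxPrefix;
 * // e.g. f3 0f c7 /6 (vmxon) in the memory table: 6*4 + 2 = 26.
 * @endcode
 */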
8473
8474/**
8475 * Common 'bswap register' helper.
8476 */
8477FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8478{
8479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8480 switch (pVCpu->iem.s.enmEffOpSize)
8481 {
8482 case IEMMODE_16BIT:
8483 IEM_MC_BEGIN(1, 0);
8484 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8485 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8486 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8487 IEM_MC_ADVANCE_RIP();
8488 IEM_MC_END();
8489 return VINF_SUCCESS;
8490
8491 case IEMMODE_32BIT:
8492 IEM_MC_BEGIN(1, 0);
8493 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8494 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8495 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8496 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8497 IEM_MC_ADVANCE_RIP();
8498 IEM_MC_END();
8499 return VINF_SUCCESS;
8500
8501 case IEMMODE_64BIT:
8502 IEM_MC_BEGIN(1, 0);
8503 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8504 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8505 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8506 IEM_MC_ADVANCE_RIP();
8507 IEM_MC_END();
8508 return VINF_SUCCESS;
8509
8510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8511 }
8512}
8513
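/*
 * What the 32-bit worker computes, as a C sketch (illustrative; BSWAP with
 * a 16-bit operand is undefined according to the manuals, which is why the
 * 16-bit case above gets its own worker and deliberately leaves the high
 * dword alone):
 *
 * @code
 * static uint32_t iemExampleBSwapU32(uint32_t u32)
 * {
 *     return ((u32 & UINT32_C(0x000000ff)) << 24)
 *          | ((u32 & UINT32_C(0x0000ff00)) <<  8)
 *          | ((u32 & UINT32_C(0x00ff0000)) >>  8)
 *          | ((u32 & UINT32_C(0xff000000)) >> 24);
 * }
 * @endcode
 */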
8514
8515/** Opcode 0x0f 0xc8. */
8516FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8517{
8518 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8519 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8520 prefix, but it appears REX.B is the correct prefix. For a parallel
8521 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8522 IEMOP_HLP_MIN_486();
8523 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8524}
8525
8526
8527/** Opcode 0x0f 0xc9. */
8528FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8529{
8530 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8531 IEMOP_HLP_MIN_486();
8532 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8533}
8534
8535
8536/** Opcode 0x0f 0xca. */
8537FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8538{
8539 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8540 IEMOP_HLP_MIN_486();
8541 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8542}
8543
8544
8545/** Opcode 0x0f 0xcb. */
8546FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8547{
8548 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8549 IEMOP_HLP_MIN_486();
8550 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8551}
8552
8553
8554/** Opcode 0x0f 0xcc. */
8555FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8556{
8557 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8558 IEMOP_HLP_MIN_486();
8559 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8560}
8561
8562
8563/** Opcode 0x0f 0xcd. */
8564FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8565{
8566 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8567 IEMOP_HLP_MIN_486();
8568 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8569}
8570
8571
8572/** Opcode 0x0f 0xce. */
8573FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8574{
8575 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8576 IEMOP_HLP_MIN_486();
8577 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8578}
8579
8580
8581/** Opcode 0x0f 0xcf. */
8582FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8583{
8584 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8585 IEMOP_HLP_MIN_486();
8586 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8587}
8588
8589
8590/* Opcode 0x0f 0xd0 - invalid */
8591/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8592FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8593/* Opcode 0xf3 0x0f 0xd0 - invalid */
8594/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8595FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8596
8597/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8598FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8599/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8600FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8601/* Opcode 0xf3 0x0f 0xd1 - invalid */
8602/* Opcode 0xf2 0x0f 0xd1 - invalid */
8603
8604/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8605FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8606/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8607FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8608/* Opcode 0xf3 0x0f 0xd2 - invalid */
8609/* Opcode 0xf2 0x0f 0xd2 - invalid */
8610
8611/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8612FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8613/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8614FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8615/* Opcode 0xf3 0x0f 0xd3 - invalid */
8616/* Opcode 0xf2 0x0f 0xd3 - invalid */
8617
8618/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8619FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8620/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8621FNIEMOP_STUB(iemOp_paddq_Vx_W);
8622/* Opcode 0xf3 0x0f 0xd4 - invalid */
8623/* Opcode 0xf2 0x0f 0xd4 - invalid */
8624
8625/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8626FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8627/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8628FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8629/* Opcode 0xf3 0x0f 0xd5 - invalid */
8630/* Opcode 0xf2 0x0f 0xd5 - invalid */
8631
8632/* Opcode 0x0f 0xd6 - invalid */
8633
8634/**
8635 * @opcode 0xd6
8636 * @oppfx 0x66
8637 * @opcpuid sse2
8638 * @opgroup og_sse2_pcksclr_datamove
8639 * @opxcpttype none
8640 * @optest op1=-1 op2=2 -> op1=2
8641 * @optest op1=0 op2=-42 -> op1=-42
8642 */
8643FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8644{
8645 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8647 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8648 {
8649 /*
8650 * Register, register.
8651 */
8652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8653 IEM_MC_BEGIN(0, 2);
8654 IEM_MC_LOCAL(uint64_t, uSrc);
8655
8656 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8657 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8658
8659 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8660 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8661
8662 IEM_MC_ADVANCE_RIP();
8663 IEM_MC_END();
8664 }
8665 else
8666 {
8667 /*
8668 * Memory, register.
8669 */
8670 IEM_MC_BEGIN(0, 2);
8671 IEM_MC_LOCAL(uint64_t, uSrc);
8672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8673
8674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8676 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8678
8679 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8680 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8681
8682 IEM_MC_ADVANCE_RIP();
8683 IEM_MC_END();
8684 }
8685 return VINF_SUCCESS;
8686}
8687
8688
8689/**
8690 * @opcode 0xd6
8691 * @opcodesub 11 mr/reg
8692 * @oppfx f3
8693 * @opcpuid sse2
8694 * @opgroup og_sse2_simdint_datamove
8695 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8696 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8697 */
8698FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8699{
8700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8701 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8702 {
8703 /*
8704 * Register, register.
8705 */
8706 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8708 IEM_MC_BEGIN(0, 1);
8709 IEM_MC_LOCAL(uint64_t, uSrc);
8710
8711 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8712 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8713
8714 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8715 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8716 IEM_MC_FPU_TO_MMX_MODE();
8717
8718 IEM_MC_ADVANCE_RIP();
8719 IEM_MC_END();
8720 return VINF_SUCCESS;
8721 }
8722
8723 /**
8724 * @opdone
8725 * @opmnemonic udf30fd6mem
8726 * @opcode 0xd6
8727 * @opcodesub !11 mr/reg
8728 * @oppfx f3
8729 * @opunused intel-modrm
8730 * @opcpuid sse
8731 * @optest ->
8732 */
8733 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8734}
8735
8736
8737/**
8738 * @opcode 0xd6
8739 * @opcodesub 11 mr/reg
8740 * @oppfx f2
8741 * @opcpuid sse2
8742 * @opgroup og_sse2_simdint_datamove
8743 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8744 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8745 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8746 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8747 * @optest op1=-42 op2=0xfedcba9876543210
8748 * -> op1=0xfedcba9876543210 ftw=0xff
8749 */
8750FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8751{
8752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8753 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8754 {
8755 /*
8756 * Register, register.
8757 */
8758 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8760 IEM_MC_BEGIN(0, 1);
8761 IEM_MC_LOCAL(uint64_t, uSrc);
8762
8763 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8764 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8765
8766 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8767 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8768 IEM_MC_FPU_TO_MMX_MODE();
8769
8770 IEM_MC_ADVANCE_RIP();
8771 IEM_MC_END();
8772 return VINF_SUCCESS;
8773 }
8774
8775 /**
8776 * @opdone
8777 * @opmnemonic udf20fd6mem
8778 * @opcode 0xd6
8779 * @opcodesub !11 mr/reg
8780 * @oppfx f2
8781 * @opunused intel-modrm
8782 * @opcpuid sse
8783 * @optest ->
8784 */
8785 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8786}
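/*
 * Editor's hedged sketch (illustrative only, not part of the build): MOVDQ2Q
 * is the inverse move, copying just the low quadword of the XMM register into
 * the MMX register; the high half of the source is ignored and the same
 * FPU/MMX mode switch applies.  Reuses the hypothetical XMMSKETCH above.
 */
#if 0
/* movdq2q mm, xmm: mm = xmm[63:0]. */
static uint64_t sketchMovdq2q(XMMSKETCH const *pSrc)
{
    return pSrc->au64[0];
}
#endif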
8787
8788/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8789FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8790{
8791 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8792 /** @todo testcase: Check that the instruction implicitly clears the high
8793 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8794 * and the opcode is extended to operate on the whole width (not
8795 * just 128 bits). */
8796 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8797 /* Docs say register only. */
8798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8799 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8800 {
8801 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8802 IEM_MC_BEGIN(2, 0);
8803 IEM_MC_ARG(uint64_t *, pDst, 0);
8804 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8805 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8806 IEM_MC_PREPARE_FPU_USAGE();
8807 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8808 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8809 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8810 IEM_MC_ADVANCE_RIP();
8811 IEM_MC_END();
8812 return VINF_SUCCESS;
8813 }
8814 return IEMOP_RAISE_INVALID_OPCODE();
8815}
8816
8817/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8818FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8819{
8820 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8821 /** @todo testcase: Check that the instruction implicitly clears the high
8822 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8823 * and the opcode is extended to operate on the whole width (not
8824 * just 128 bits). */
8825 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8826 /* Docs say register only. */
8827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8828 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8829 {
8830 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8831 IEM_MC_BEGIN(2, 0);
8832 IEM_MC_ARG(uint64_t *, pDst, 0);
8833 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8834 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8835 IEM_MC_PREPARE_SSE_USAGE();
8836 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8837 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8838 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8839 IEM_MC_ADVANCE_RIP();
8840 IEM_MC_END();
8841 return VINF_SUCCESS;
8842 }
8843 return IEMOP_RAISE_INVALID_OPCODE();
8844}
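/*
 * Editor's hedged sketch (illustrative only, not part of the build): both
 * PMOVMSKB forms gather the most significant bit of every source byte into
 * the low bits of the destination GPR and clear the rest; cbVec is 8 for the
 * MMX form and 16 for the SSE2 form above.
 */
#if 0
#include <stdint.h>

static uint32_t sketchPmovmskb(uint8_t const *pabSrc, unsigned cbVec)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < cbVec; iByte++)
        fMask |= (uint32_t)(pabSrc[iByte] >> 7) << iByte;
    return fMask;
}
#endif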
8845
8846/* Opcode 0xf3 0x0f 0xd7 - invalid */
8847/* Opcode 0xf2 0x0f 0xd7 - invalid */
8848
8849
8850/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8851FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8852/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8853FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8854/* Opcode 0xf3 0x0f 0xd8 - invalid */
8855/* Opcode 0xf2 0x0f 0xd8 - invalid */
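/*
 * Editor's hedged sketch (illustrative only, not part of the build): the
 * stubbed PSUBUSB is a per-byte unsigned subtract that saturates at zero
 * instead of wrapping; one 64-bit MMX lane would behave like this.
 */
#if 0
#include <stdint.h>

static uint64_t sketchPsubusbU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        uint8_t bDst = (uint8_t)(uDst >> (iByte * 8));
        uint8_t bSrc = (uint8_t)(uSrc >> (iByte * 8));
        uint8_t bRes = bDst > bSrc ? (uint8_t)(bDst - bSrc) : 0; /* saturate at 0 */
        uResult |= (uint64_t)bRes << (iByte * 8);
    }
    return uResult;
}
#endif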
8856
8857/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8858FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8859/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8860FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8861/* Opcode 0xf3 0x0f 0xd9 - invalid */
8862/* Opcode 0xf2 0x0f 0xd9 - invalid */
8863
8864/** Opcode 0x0f 0xda - pminub Pq, Qq */
8865FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8866/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8867FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8868/* Opcode 0xf3 0x0f 0xda - invalid */
8869/* Opcode 0xf2 0x0f 0xda - invalid */
8870
8871/** Opcode 0x0f 0xdb - pand Pq, Qq */
8872FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8873/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8874FNIEMOP_STUB(iemOp_pand_Vx_W);
8875/* Opcode 0xf3 0x0f 0xdb - invalid */
8876/* Opcode 0xf2 0x0f 0xdb - invalid */
8877
8878/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8879FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8880/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8881FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8882/* Opcode 0xf3 0x0f 0xdc - invalid */
8883/* Opcode 0xf2 0x0f 0xdc - invalid */
8884
8885/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8886FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8887/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8888FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8889/* Opcode 0xf3 0x0f 0xdd - invalid */
8890/* Opcode 0xf2 0x0f 0xdd - invalid */
8891
8892/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8893FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8894/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8895FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8896/* Opcode 0xf3 0x0f 0xde - invalid */
8897/* Opcode 0xf2 0x0f 0xde - invalid */
8898
8899/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8900FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8901/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8902FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8903/* Opcode 0xf3 0x0f 0xdf - invalid */
8904/* Opcode 0xf2 0x0f 0xdf - invalid */
8905
8906/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8907FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8908/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8909FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8910/* Opcode 0xf3 0x0f 0xe0 - invalid */
8911/* Opcode 0xf2 0x0f 0xe0 - invalid */
8912
8913/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8914FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8915/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8916FNIEMOP_STUB(iemOp_psraw_Vx_W);
8917/* Opcode 0xf3 0x0f 0xe1 - invalid */
8918/* Opcode 0xf2 0x0f 0xe1 - invalid */
8919
8920/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8921FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8922/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8923FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8924/* Opcode 0xf3 0x0f 0xe2 - invalid */
8925/* Opcode 0xf2 0x0f 0xe2 - invalid */
8926
8927/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8928FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8929/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8930FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8931/* Opcode 0xf3 0x0f 0xe3 - invalid */
8932/* Opcode 0xf2 0x0f 0xe3 - invalid */
8933
8934/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8935FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8936/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8937FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8938/* Opcode 0xf3 0x0f 0xe4 - invalid */
8939/* Opcode 0xf2 0x0f 0xe4 - invalid */
8940
8941/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8942FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8943/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8944FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8945/* Opcode 0xf3 0x0f 0xe5 - invalid */
8946/* Opcode 0xf2 0x0f 0xe5 - invalid */
8947
8948/* Opcode 0x0f 0xe6 - invalid */
8949/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8950FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8951/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8952FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8953/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8954FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8955
8956
8957/**
8958 * @opcode 0xe7
8959 * @opcodesub !11 mr/reg
8960 * @oppfx none
8961 * @opcpuid sse
8962 * @opgroup og_sse1_cachect
8963 * @opxcpttype none
8964 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
8965 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8966 */
8967FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8968{
8969 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8971 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8972 {
8973 /* Register, memory. */
8974 IEM_MC_BEGIN(0, 2);
8975 IEM_MC_LOCAL(uint64_t, uSrc);
8976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8977
8978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8980 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8981 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8982
8983 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8984 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8985 IEM_MC_FPU_TO_MMX_MODE();
8986
8987 IEM_MC_ADVANCE_RIP();
8988 IEM_MC_END();
8989 return VINF_SUCCESS;
8990 }
8991 /**
8992 * @opdone
8993 * @opmnemonic ud0fe7reg
8994 * @opcode 0xe7
8995 * @opcodesub 11 mr/reg
8996 * @oppfx none
8997 * @opunused immediate
8998 * @opcpuid sse
8999 * @optest ->
9000 */
9001 return IEMOP_RAISE_INVALID_OPCODE();
9002}
9003
9004/**
9005 * @opcode 0xe7
9006 * @opcodesub !11 mr/reg
9007 * @oppfx 0x66
9008 * @opcpuid sse2
9009 * @opgroup og_sse2_cachect
9010 * @opxcpttype 1
9011 * @optest op1=-1 op2=2 -> op1=2
9012 * @optest op1=0 op2=-42 -> op1=-42
9013 */
9014FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9015{
9016 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9018 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9019 {
9020 /* Register, memory. */
9021 IEM_MC_BEGIN(0, 2);
9022 IEM_MC_LOCAL(RTUINT128U, uSrc);
9023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9024
9025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9027 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9028 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9029
9030 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9031 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9032
9033 IEM_MC_ADVANCE_RIP();
9034 IEM_MC_END();
9035 return VINF_SUCCESS;
9036 }
9037
9038 /**
9039 * @opdone
9040 * @opmnemonic ud660fe7reg
9041 * @opcode 0xe7
9042 * @opcodesub 11 mr/reg
9043 * @oppfx 0x66
9044 * @opunused immediate
9045 * @opcpuid sse
9046 * @optest ->
9047 */
9048 return IEMOP_RAISE_INVALID_OPCODE();
9049}
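/*
 * Editor's hedged sketch (illustrative only, not part of the build): what
 * guest code using MOVNTDQ typically looks like, written with the SSE2
 * intrinsics.  The destination must be 16-byte aligned, matching the
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE check above, and because the store is
 * non-temporal an sfence is needed before another agent consumes the data.
 */
#if 0
#include <emmintrin.h>
#include <stddef.h>

static void sketchStreamFill(__m128i *paDst /* 16-byte aligned */, size_t cVecs, __m128i uValue)
{
    for (size_t i = 0; i < cVecs; i++)
        _mm_stream_si128(&paDst[i], uValue); /* movntdq [paDst + i*16], xmm */
    _mm_sfence();                            /* order the non-temporal stores */
}
#endif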
9050
9051/* Opcode 0xf3 0x0f 0xe7 - invalid */
9052/* Opcode 0xf2 0x0f 0xe7 - invalid */
9053
9054
9055/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9056FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9057/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9058FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9059/* Opcode 0xf3 0x0f 0xe8 - invalid */
9060/* Opcode 0xf2 0x0f 0xe8 - invalid */
9061
9062/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9063FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9064/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9065FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9066/* Opcode 0xf3 0x0f 0xe9 - invalid */
9067/* Opcode 0xf2 0x0f 0xe9 - invalid */
9068
9069/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9070FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9071/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9072FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9073/* Opcode 0xf3 0x0f 0xea - invalid */
9074/* Opcode 0xf2 0x0f 0xea - invalid */
9075
9076/** Opcode 0x0f 0xeb - por Pq, Qq */
9077FNIEMOP_STUB(iemOp_por_Pq_Qq);
9078/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9079FNIEMOP_STUB(iemOp_por_Vx_W);
9080/* Opcode 0xf3 0x0f 0xeb - invalid */
9081/* Opcode 0xf2 0x0f 0xeb - invalid */
9082
9083/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9084FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9085/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9086FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9087/* Opcode 0xf3 0x0f 0xec - invalid */
9088/* Opcode 0xf2 0x0f 0xec - invalid */
9089
9090/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9091FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9092/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9093FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9094/* Opcode 0xf3 0x0f 0xed - invalid */
9095/* Opcode 0xf2 0x0f 0xed - invalid */
9096
9097/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9098FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9099/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9100FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9101/* Opcode 0xf3 0x0f 0xee - invalid */
9102/* Opcode 0xf2 0x0f 0xee - invalid */
9103
9104
9105/** Opcode 0x0f 0xef - pxor Pq, Qq */
9106FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9107{
9108 IEMOP_MNEMONIC(pxor, "pxor");
9109 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9110}
9111
9112/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9113FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9114{
9115 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9116 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9117}
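/*
 * Editor's hedged sketch (illustrative only, not part of the build): both
 * pxor entry points defer to the common "full, full -> full" template with
 * the pxor worker; the operation itself is a plain bitwise XOR of the whole
 * register, so xorring a register with itself zeroes it.
 */
#if 0
#include <stdint.h>

static void sketchPxorU64(uint64_t *puDst, uint64_t const *puSrc)
{
    *puDst ^= *puSrc; /* dst = dst XOR src */
}
#endif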
9118
9119/* Opcode 0xf3 0x0f 0xef - invalid */
9120/* Opcode 0xf2 0x0f 0xef - invalid */
9121
9122/* Opcode 0x0f 0xf0 - invalid */
9123/* Opcode 0x66 0x0f 0xf0 - invalid */
9124/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9125FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9126
9127/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9128FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9129/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9130FNIEMOP_STUB(iemOp_psllw_Vx_W);
9131/* Opcode 0xf2 0x0f 0xf1 - invalid */
9132
9133/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9134FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9135/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9136FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9137/* Opcode 0xf2 0x0f 0xf2 - invalid */
9138
9139/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9140FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9141/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9142FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9143/* Opcode 0xf2 0x0f 0xf3 - invalid */
9144
9145/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9146FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9147/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9148FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9149/* Opcode 0xf2 0x0f 0xf4 - invalid */
9150
9151/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9152FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9153/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9154FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9155/* Opcode 0xf2 0x0f 0xf5 - invalid */
9156
9157/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9158FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9159/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9160FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9161/* Opcode 0xf2 0x0f 0xf6 - invalid */
9162
9163/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9164FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9165/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9166FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9167/* Opcode 0xf2 0x0f 0xf7 - invalid */
9168
9169/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9170FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9171/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9172FNIEMOP_STUB(iemOp_psubb_Vx_W);
9173/* Opcode 0xf2 0x0f 0xf8 - invalid */
9174
9175/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9176FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9177/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9178FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9179/* Opcode 0xf2 0x0f 0xf9 - invalid */
9180
9181/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9182FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9183/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9184FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9185/* Opcode 0xf2 0x0f 0xfa - invalid */
9186
9187/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9188FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9189/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9190FNIEMOP_STUB(iemOp_psubq_Vx_W);
9191/* Opcode 0xf2 0x0f 0xfb - invalid */
9192
9193/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9194FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9195/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9196FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9197/* Opcode 0xf2 0x0f 0xfc - invalid */
9198
9199/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9200FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9201/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9202FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9203/* Opcode 0xf2 0x0f 0xfd - invalid */
9204
9205/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9206FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9207/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9208FNIEMOP_STUB(iemOp_paddd_Vx_W);
9209/* Opcode 0xf2 0x0f 0xfe - invalid */
9210
9211
9212/** Opcode 0x0f 0xff - ud0 */
9213FNIEMOP_DEF(iemOp_ud0)
9214{
9215 IEMOP_MNEMONIC(ud0, "ud0");
9216 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9217 {
9218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9219#ifndef TST_IEM_CHECK_MC
9220 RTGCPTR GCPtrEff;
9221 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9222 if (rcStrict != VINF_SUCCESS)
9223 return rcStrict;
9224#endif
9225 IEMOP_HLP_DONE_DECODING();
9226 }
9227 return IEMOP_RAISE_INVALID_OPCODE();
9228}
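/*
 * Editor's hedged sketch (illustrative only, not part of the build): on
 * Intel CPUs UD0 consumes a ModR/M byte plus any SIB/displacement bytes, so
 * its length varies, while AMD treats it as a bare two-byte opcode; the
 * decode above mirrors this by fetching the ModR/M and calculating the
 * effective address before raising #UD.  Ignoring SIB/displacement:
 */
#if 0
static unsigned sketchUd0MinLength(int fIntel)
{
    return fIntel ? 3 /* 0F FF /r */ : 2 /* 0F FF */;
}
#endif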
9229
9230
9231
9232/**
9233 * Two byte opcode map, first byte 0x0f.
9234 *
9235 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9236 * check if it needs updating as well when making changes.
9237 */
9238IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9239{
9240 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9241 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9242 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9243 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9244 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9245 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9246 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9247 /* 0x06 */ IEMOP_X4(iemOp_clts),
9248 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9249 /* 0x08 */ IEMOP_X4(iemOp_invd),
9250 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9251 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9252 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9253 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9254 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9255 /* 0x0e */ IEMOP_X4(iemOp_femms),
9256 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9257
9258 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9259 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9260 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9261 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9262 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9263 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9264 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9265 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9266 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9267 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9268 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9269 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9270 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9271 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9272 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9273 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9274
9275 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9276 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9277 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9278 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9279 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9280 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9281 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9282 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9283 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9284 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9285 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9286 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9287 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9288 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9289 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9290 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9291
9292 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9293 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9294 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9295 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9296 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9297 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9298 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9299 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9300 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9301 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9302 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9303 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9304 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9305 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9306 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9307 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9308
9309 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9310 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9311 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9312 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9313 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9314 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9315 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9316 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9317 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9318 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9319 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9320 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9321 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9322 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9323 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9324 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9325
9326 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9327 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9328 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9329 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9330 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9331 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9332 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9333 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9334 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9335 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9336 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9337 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9338 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9339 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9340 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9341 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9342
9343 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9344 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9345 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9346 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9347 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9348 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9349 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9350 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9351 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9352 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9353 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9354 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9355 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9356 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9357 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9358 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9359
9360 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9361 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9362 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9363 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9364 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9365 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9366 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9367 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9368
9369 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9370 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9371 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9372 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9373 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9374 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9375 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9376 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9377
9378 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9379 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9380 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9381 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9382 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9383 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9384 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9385 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9386 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9387 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9388 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9389 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9390 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9391 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9392 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9393 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9394
9395 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9396 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9397 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9398 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9399 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9400 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9401 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9402 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9403 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9404 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9405 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9406 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9407 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9408 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9409 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9410 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9411
9412 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9413 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9414 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9415 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9416 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9417 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9418 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9419 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9420 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9421 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9422 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9423 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9424 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9425 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9426 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9427 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9428
9429 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9430 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9431 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9432 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9433 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9434 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9435 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9436 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9437 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9438 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9439 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9440 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9441 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9442 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9443 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9444 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9445
9446 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9447 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9448 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9449 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9450 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9451 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9452 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9453 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9454 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9455 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9456 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9457 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9458 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9459 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9460 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9461 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9462
9463 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9464 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9465 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9466 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9467 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9468 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9469 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9470 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9471 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9472 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9473 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9474 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9475 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9476 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9477 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9478 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9479
9480 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9481 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9482 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9483 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9484 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9485 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9486 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9487 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9488 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9489 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9490 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9491 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9492 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9493 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9494 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9495 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9496
9497 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9498 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9499 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9500 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9501 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9502 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9503 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9504 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9505 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9506 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9507 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9508 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9509 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9510 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9511 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9512 /* 0xff */ IEMOP_X4(iemOp_ud0),
9513};
9514AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
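/*
 * Editor's hedged sketch (illustrative only, not part of the build): the map
 * stores four entries per opcode byte, one per mandatory-prefix column (none,
 * 066h, 0f3h, 0f2h), hence the 256 * 4 == 1024 assertion above.  A lookup
 * would be indexed roughly like this, where idxPrefix (0..3) is an assumed
 * name for the decoded mandatory-prefix column:
 */
#if 0
static PFNIEMOP sketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif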
9515
9516/** @} */
9517