VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 73959

Last change on this file since 73959 was 73959, checked in by vboxsync, 7 years ago

VMM/IEM, HM: Nested VMX: bugref:9180 Use VMXEXITINFO to pass decoder info to IEM. Avoid unnecessarily constructing VM-exit info in IEM for VMX instructions.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 338.1 KB
 
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 73959 2018-08-29 15:24:49Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

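/*
 * A note on the recurring ModR/M decode pattern below: the ModR/M byte is
 * mod (bits 7:6), reg (bits 5:3) and r/m (bits 2:0), and mod == 11b selects
 * a register operand, which is what
 *     (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
 * tests for. The 3-bit reg/r/m fields are widened to 4 bits by OR'ing in
 * pVCpu->iem.s.uRexReg / uRexB, which (assuming the usual IEM decoder
 * convention) hold the REX.R / REX.B bits pre-shifted into bit 3.
 */
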
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5 (common worker for verr/verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
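
/*
 * Worked example for the Group 6 dispatch above: for the byte sequence
 * 0f 00 d8, the ModR/M byte 0xd8 has mod=11b, reg=011b, rm=000b, so
 * g_apfnGroup6[3] (iemOp_Grp6_ltr) is called and takes its register path;
 * with ModR/M 0x00 the same opcode decodes as sldt with a memory operand.
 */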


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
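
/*
 * Note: per the Intel SDM, LGDT (and LIDT below) reads a pseudo-descriptor
 * from memory: a 16-bit limit followed by a 24-, 32- or 64-bit base,
 * depending on operand size and CPU mode. That is why the effective segment,
 * effective address and effective operand size are all forwarded to the
 * C implementation.
 */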


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       low four bits (CR0.PE/MP/EM/TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
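
/*
 * Worked example for the Group 7 dispatch above: with mod != 11b the reg
 * field simply indexes g_apfnGroup7Mem, while mod == 11b encodings are
 * keyed on reg and r/m together, e.g. 0f 01 d0 (mod=11b, reg=010b, rm=000b)
 * is xgetbv and 0f 01 f8 (mod=11b, reg=111b, rm=000b) is swapgs.
 */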

/** Opcode 0x0f 0x02 and 0x03 (common worker for lar/lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                /** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
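
/*
 * Note: PREFETCH/PREFETCHW are hints without architectural side effects,
 * so decoding the operand and then doing nothing (as above) is a conforming
 * implementation of this group.
 */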


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
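
/*
 * Note: FEMMS is AMD's faster variant of EMMS; like EMMS it marks the x87
 * register stack empty when leaving MMX mode (register contents become
 * undefined), which is presumably what IEM_MC_FPU_FROM_MMX_MODE models above.
 */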


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
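
/*
 * Note: movups permits unaligned memory operands, hence the plain
 * IEM_MC_FETCH_MEM_U128 above; the aligned variants (e.g. movaps) use
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE, which enforces the 16-byte alignment
 * check instead.
 */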


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
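
/*
 * Note: per the Intel SDM, movss has asymmetric semantics: the register
 * form above merges the low dword and leaves bits 127:32 of the destination
 * untouched (IEM_MC_STORE_XREG_U32), while the load-from-memory form
 * zero-extends through bit 127 (IEM_MC_STORE_XREG_U32_ZX_U128). movsd below
 * follows the same pattern for the low qword.
 */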


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* The register form only updates the low quadword; bits 127:64 of
           the destination are preserved (Intel SDM). */
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
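
/*
 * Note: opcode 0f 12 encodes two instructions: with a register source
 * (mod == 11b) it is movhlps, with a memory source it is movlps. Opcode
 * 0f 16 further down mirrors this with movlhps/movhps.
 */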


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
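
/*
 * Note: movddup reads only 64 bits from memory, so the unaligned
 * IEM_MC_FETCH_MEM_U64 above is correct (SSE exception type 5), whereas
 * movsldup/movshdup read a full 128 bits and use the alignment-checking
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE (type 4).
 */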


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

1842
1843/**
1844 * @opcode 0x16
1845 * @oppfx 0xf3
1846 * @opcpuid sse3
1847 * @opgroup og_sse3_pcksclr_datamove
1848 * @opxcpttype 4
1849 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1850 * op1=0x00000002000000020000000100000001
1851 */
1852FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1853{
1854 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1857 {
1858 /*
1859 * Register, register.
1860 */
1861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1862 IEM_MC_BEGIN(2, 0);
1863 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1864 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1865
1866 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1867 IEM_MC_PREPARE_SSE_USAGE();
1868
1869 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1870 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1871 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1872
1873 IEM_MC_ADVANCE_RIP();
1874 IEM_MC_END();
1875 }
1876 else
1877 {
1878 /*
1879 * Register, memory.
1880 */
1881 IEM_MC_BEGIN(2, 2);
1882 IEM_MC_LOCAL(RTUINT128U, uSrc);
1883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1884 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1885 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1886
1887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1889 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1890 IEM_MC_PREPARE_SSE_USAGE();
1891
1892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1893 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1894 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1895
1896 IEM_MC_ADVANCE_RIP();
1897 IEM_MC_END();
1898 }
1899 return VINF_SUCCESS;
1900}
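
/**
 * @par Example
 * The @optest values above follow from movshdup duplicating the two odd
 * dwords of the source across the even/odd pairs of the destination. A
 * hedged sketch with hypothetical names:
 * @code
 * #include <stdint.h>
 * static void ExampleMovShDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
 * {
 *     au32Dst[0] = au32Src[1];  au32Dst[1] = au32Src[1]; // low pair  <- dword 1
 *     au32Dst[2] = au32Src[3];  au32Dst[3] = au32Src[3]; // high pair <- dword 3
 * }
 * @endcode
 */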
1901
1902/**
1903 * @opdone
1904 * @opmnemonic udf20f16
1905 * @opcode 0x16
1906 * @oppfx 0xf2
1907 * @opunused intel-modrm
1908 * @opcpuid sse
1909 * @optest ->
1910 * @opdone
1911 */
1912
1913
1914/**
1915 * @opcode 0x17
1916 * @opcodesub !11 mr/reg
1917 * @oppfx none
1918 * @opcpuid sse
1919 * @opgroup og_sse_simdfp_datamove
1920 * @opxcpttype 5
1921 * @optest op1=1 op2=2 -> op1=2
1922 * @optest op1=0 op2=-42 -> op1=-42
1923 */
1924FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1925{
1926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1927 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1928 {
1929 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1930
1931 IEM_MC_BEGIN(0, 2);
1932 IEM_MC_LOCAL(uint64_t, uSrc);
1933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1934
1935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1939
1940 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1941 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1942
1943 IEM_MC_ADVANCE_RIP();
1944 IEM_MC_END();
1945 return VINF_SUCCESS;
1946 }
1947
1948 /**
1949 * @opdone
1950 * @opmnemonic ud0f17m3
1951 * @opcode 0x17
1952 * @opcodesub 11 mr/reg
1953 * @oppfx none
1954 * @opunused immediate
1955 * @opcpuid sse
1956 * @optest ->
1957 */
1958 return IEMOP_RAISE_INVALID_OPCODE();
1959}
1960
1961
1962/**
1963 * @opcode 0x17
1964 * @opcodesub !11 mr/reg
1965 * @oppfx 0x66
1966 * @opcpuid sse2
1967 * @opgroup og_sse2_pcksclr_datamove
1968 * @opxcpttype 5
1969 * @optest op1=1 op2=2 -> op1=2
1970 * @optest op1=0 op2=-42 -> op1=-42
1971 */
1972FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1973{
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1976 {
1977 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1978
1979 IEM_MC_BEGIN(0, 2);
1980 IEM_MC_LOCAL(uint64_t, uSrc);
1981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1982
1983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1985        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1986 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1987
1988 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1989 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1990
1991 IEM_MC_ADVANCE_RIP();
1992 IEM_MC_END();
1993 return VINF_SUCCESS;
1994 }
1995
1996 /**
1997 * @opdone
1998 * @opmnemonic ud660f17m3
1999 * @opcode 0x17
2000 * @opcodesub 11 mr/reg
2001 * @oppfx 0x66
2002 * @opunused immediate
2003 * @opcpuid sse
2004 * @optest ->
2005 */
2006 return IEMOP_RAISE_INVALID_OPCODE();
2007}
2008
2009
2010/**
2011 * @opdone
2012 * @opmnemonic udf30f17
2013 * @opcode 0x17
2014 * @oppfx 0xf3
2015 * @opunused intel-modrm
2016 * @opcpuid sse
2017 * @optest ->
2018 * @opdone
2019 */
2020
2021/**
2022 * @opmnemonic udf20f17
2023 * @opcode 0x17
2024 * @oppfx 0xf2
2025 * @opunused intel-modrm
2026 * @opcpuid sse
2027 * @optest ->
2028 * @opdone
2029 */
2030
2031
2032/** Opcode 0x0f 0x18. */
2033FNIEMOP_DEF(iemOp_prefetch_Grp16)
2034{
2035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2036 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2037 {
2038 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2039 {
2040 case 4: /* Aliased to /0 for the time being according to AMD. */
2041 case 5: /* Aliased to /0 for the time being according to AMD. */
2042 case 6: /* Aliased to /0 for the time being according to AMD. */
2043 case 7: /* Aliased to /0 for the time being according to AMD. */
2044 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2045 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2046 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2047 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2049 }
2050
2051 IEM_MC_BEGIN(0, 1);
2052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2055 /* Currently a NOP. */
2056 NOREF(GCPtrEffSrc);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 return VINF_SUCCESS;
2060 }
2061
2062 return IEMOP_RAISE_INVALID_OPCODE();
2063}
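
/**
 * @par Example
 * The switch above, like most of this file, picks fields out of the ModR/M
 * byte with shift-and-mask expressions. A hedged, self-contained sketch of
 * that decoding (hypothetical helper, not part of the decoder):
 * @code
 * #include <stdint.h>
 * static void ExampleDecodeModRm(uint8_t bRm, unsigned *piMod, unsigned *piReg, unsigned *piRm)
 * {
 *     *piMod = bRm >> 6;        // 3 means register operand, otherwise memory
 *     *piReg = (bRm >> 3) & 7;  // register, or the /0../7 opcode extension
 *     *piRm  = bRm & 7;         // r/m field, base of the effective address
 * }
 * @endcode
 * In long mode REX.R and REX.B widen the reg and r/m fields to four bits,
 * which is what OR-ing in pVCpu->iem.s.uRexReg and pVCpu->iem.s.uRexB does
 * throughout this file.
 */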
2064
2065
2066/** Opcode 0x0f 0x19..0x1f. */
2067FNIEMOP_DEF(iemOp_nop_Ev)
2068{
2069 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2072 {
2073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2074 IEM_MC_BEGIN(0, 0);
2075 IEM_MC_ADVANCE_RIP();
2076 IEM_MC_END();
2077 }
2078 else
2079 {
2080 IEM_MC_BEGIN(0, 1);
2081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2084 /* Currently a NOP. */
2085 NOREF(GCPtrEffSrc);
2086 IEM_MC_ADVANCE_RIP();
2087 IEM_MC_END();
2088 }
2089 return VINF_SUCCESS;
2090}
2091
2092
2093/** Opcode 0x0f 0x20. */
2094FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2095{
2096    /* mod is ignored, as are operand-size overrides. */
2097 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2098 IEMOP_HLP_MIN_386();
2099 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2100 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2101 else
2102 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2103
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2106 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2107 {
2108 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2109 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2110 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2111 iCrReg |= 8;
2112 }
2113 switch (iCrReg)
2114 {
2115 case 0: case 2: case 3: case 4: case 8:
2116 break;
2117 default:
2118 return IEMOP_RAISE_INVALID_OPCODE();
2119 }
2120 IEMOP_HLP_DONE_DECODING();
2121
2122 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2123}
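
/**
 * @par Example
 * A hedged sketch of how the function above assembles the control register
 * index from ModR/M.reg, REX.R and the LOCK-means-CR8 quirk (hypothetical
 * helper; -1 stands in for raising #UD):
 * @code
 * #include <stdint.h>
 * #include <stdbool.h>
 * static int ExampleCrIndex(uint8_t bRm, bool fRexR, bool fLock, bool fHasAltMovCr8)
 * {
 *     int iCrReg = ((bRm >> 3) & 7) | (fRexR ? 8 : 0);
 *     if (fLock) // e.g. lock mov eax, cr0 on CPUs with the alternative encoding
 *     {
 *         if (!fHasAltMovCr8)
 *             return -1;      // #UD, which takes precedence over #GP here
 *         iCrReg |= 8;        // the LOCK prefix selects CR8
 *     }
 *     switch (iCrReg)
 *     {
 *         case 0: case 2: case 3: case 4: case 8:
 *             return iCrReg;  // the only architecturally valid CRs
 *         default:
 *             return -1;      // #UD
 *     }
 * }
 * @endcode
 */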
2124
2125
2126/** Opcode 0x0f 0x21. */
2127FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2128{
2129 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2130 IEMOP_HLP_MIN_386();
2131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2133 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2134 return IEMOP_RAISE_INVALID_OPCODE();
2135 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2136 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2137 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2138}
2139
2140
2141/** Opcode 0x0f 0x22. */
2142FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2143{
2144    /* mod is ignored, as are operand-size overrides. */
2145 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2146 IEMOP_HLP_MIN_386();
2147 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2148 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2149 else
2150 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2151
2152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2153 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2154 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2155 {
2156 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2157 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2158 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2159 iCrReg |= 8;
2160 }
2161 switch (iCrReg)
2162 {
2163 case 0: case 2: case 3: case 4: case 8:
2164 break;
2165 default:
2166 return IEMOP_RAISE_INVALID_OPCODE();
2167 }
2168 IEMOP_HLP_DONE_DECODING();
2169
2170 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2171}
2172
2173
2174/** Opcode 0x0f 0x23. */
2175FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2176{
2177 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2178 IEMOP_HLP_MIN_386();
2179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2181 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2182 return IEMOP_RAISE_INVALID_OPCODE();
2183 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2184 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2185 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2186}
2187
2188
2189/** Opcode 0x0f 0x24. */
2190FNIEMOP_DEF(iemOp_mov_Rd_Td)
2191{
2192 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2193 /** @todo works on 386 and 486. */
2194 /* The RM byte is not considered, see testcase. */
2195 return IEMOP_RAISE_INVALID_OPCODE();
2196}
2197
2198
2199/** Opcode 0x0f 0x26. */
2200FNIEMOP_DEF(iemOp_mov_Td_Rd)
2201{
2202 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2203 /** @todo works on 386 and 486. */
2204 /* The RM byte is not considered, see testcase. */
2205 return IEMOP_RAISE_INVALID_OPCODE();
2206}
2207
2208
2209/**
2210 * @opcode 0x28
2211 * @oppfx none
2212 * @opcpuid sse
2213 * @opgroup og_sse_simdfp_datamove
2214 * @opxcpttype 1
2215 * @optest op1=1 op2=2 -> op1=2
2216 * @optest op1=0 op2=-42 -> op1=-42
2217 */
2218FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2219{
2220 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2222 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2228 IEM_MC_BEGIN(0, 0);
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2231 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2232 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2233 IEM_MC_ADVANCE_RIP();
2234 IEM_MC_END();
2235 }
2236 else
2237 {
2238 /*
2239 * Register, memory.
2240 */
2241 IEM_MC_BEGIN(0, 2);
2242 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2244
2245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2248 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2249
2250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2251 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2252
2253 IEM_MC_ADVANCE_RIP();
2254 IEM_MC_END();
2255 }
2256 return VINF_SUCCESS;
2257}
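
/**
 * @par Example
 * The _ALIGN_SSE fetch above is what gives movaps its exception type 1
 * behaviour: a memory operand that is not 16-byte aligned faults instead of
 * loading. A hedged sketch of the check (hypothetical helper):
 * @code
 * #include <stdint.h>
 * #include <stdbool.h>
 * static bool ExampleIsSseAligned(uint64_t GCPtrMem)
 * {
 *     return (GCPtrMem & 15) == 0; // 16-byte granularity for movaps/movdqa
 * }
 * @endcode
 * The unaligned-friendly forms (movups, movdqu; exception type 4UA) skip
 * this check entirely.
 */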
2258
2259/**
2260 * @opcode 0x28
2261 * @oppfx 66
2262 * @opcpuid sse2
2263 * @opgroup og_sse2_pcksclr_datamove
2264 * @opxcpttype 1
2265 * @optest op1=1 op2=2 -> op1=2
2266 * @optest op1=0 op2=-42 -> op1=-42
2267 */
2268FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2269{
2270 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2272 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2273 {
2274 /*
2275 * Register, register.
2276 */
2277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2278 IEM_MC_BEGIN(0, 0);
2279 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2280 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2281 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2282 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2283 IEM_MC_ADVANCE_RIP();
2284 IEM_MC_END();
2285 }
2286 else
2287 {
2288 /*
2289 * Register, memory.
2290 */
2291 IEM_MC_BEGIN(0, 2);
2292 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2294
2295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2297 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2298 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2299
2300 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2301 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2302
2303 IEM_MC_ADVANCE_RIP();
2304 IEM_MC_END();
2305 }
2306 return VINF_SUCCESS;
2307}
2308
2309/* Opcode 0xf3 0x0f 0x28 - invalid */
2310/* Opcode 0xf2 0x0f 0x28 - invalid */
2311
2312/**
2313 * @opcode 0x29
2314 * @oppfx none
2315 * @opcpuid sse
2316 * @opgroup og_sse_simdfp_datamove
2317 * @opxcpttype 1
2318 * @optest op1=1 op2=2 -> op1=2
2319 * @optest op1=0 op2=-42 -> op1=-42
2320 */
2321FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2322{
2323 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2326 {
2327 /*
2328 * Register, register.
2329 */
2330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2331 IEM_MC_BEGIN(0, 0);
2332 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2333 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2334 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2335 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2336 IEM_MC_ADVANCE_RIP();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 /*
2342 * Memory, register.
2343 */
2344 IEM_MC_BEGIN(0, 2);
2345 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2347
2348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2351 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2352
2353 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2354 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2355
2356 IEM_MC_ADVANCE_RIP();
2357 IEM_MC_END();
2358 }
2359 return VINF_SUCCESS;
2360}
2361
2362/**
2363 * @opcode 0x29
2364 * @oppfx 66
2365 * @opcpuid sse2
2366 * @opgroup og_sse2_pcksclr_datamove
2367 * @opxcpttype 1
2368 * @optest op1=1 op2=2 -> op1=2
2369 * @optest op1=0 op2=-42 -> op1=-42
2370 */
2371FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2372{
2373 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2376 {
2377 /*
2378 * Register, register.
2379 */
2380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2381 IEM_MC_BEGIN(0, 0);
2382 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2385 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2386 IEM_MC_ADVANCE_RIP();
2387 IEM_MC_END();
2388 }
2389 else
2390 {
2391 /*
2392 * Memory, register.
2393 */
2394 IEM_MC_BEGIN(0, 2);
2395 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2397
2398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2400 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2402
2403 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2404 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2405
2406 IEM_MC_ADVANCE_RIP();
2407 IEM_MC_END();
2408 }
2409 return VINF_SUCCESS;
2410}
2411
2412/* Opcode 0xf3 0x0f 0x29 - invalid */
2413/* Opcode 0xf2 0x0f 0x29 - invalid */
2414
2415
2416/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2417FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2418/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2419FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2420/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2421FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2422/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2423FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2424
2425
2426/**
2427 * @opcode 0x2b
2428 * @opcodesub !11 mr/reg
2429 * @oppfx none
2430 * @opcpuid sse
2431 * @opgroup og_sse1_cachect
2432 * @opxcpttype 1
2433 * @optest op1=1 op2=2 -> op1=2
2434 * @optest op1=0 op2=-42 -> op1=-42
2435 */
2436FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2437{
2438 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2440 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2441 {
2442 /*
2443         * Memory, register.
2444 */
2445 IEM_MC_BEGIN(0, 2);
2446 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2448
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2453
2454 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2455 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2456
2457 IEM_MC_ADVANCE_RIP();
2458 IEM_MC_END();
2459 }
2460 /* The register, register encoding is invalid. */
2461 else
2462 return IEMOP_RAISE_INVALID_OPCODE();
2463 return VINF_SUCCESS;
2464}
2465
2466/**
2467 * @opcode 0x2b
2468 * @opcodesub !11 mr/reg
2469 * @oppfx 0x66
2470 * @opcpuid sse2
2471 * @opgroup og_sse2_cachect
2472 * @opxcpttype 1
2473 * @optest op1=1 op2=2 -> op1=2
2474 * @optest op1=0 op2=-42 -> op1=-42
2475 */
2476FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2477{
2478 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2480 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2481 {
2482 /*
2483         * Memory, register.
2484 */
2485 IEM_MC_BEGIN(0, 2);
2486 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2488
2489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2491 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2493
2494 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2495 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2496
2497 IEM_MC_ADVANCE_RIP();
2498 IEM_MC_END();
2499 }
2500 /* The register, register encoding is invalid. */
2501 else
2502 return IEMOP_RAISE_INVALID_OPCODE();
2503 return VINF_SUCCESS;
2504}
2505/* Opcode 0xf3 0x0f 0x2b - invalid */
2506/* Opcode 0xf2 0x0f 0x2b - invalid */
2507
2508
2509/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2510FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2511/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2512FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2513/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2514FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2515/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2516FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2517
2518/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2519FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2520/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2521FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2522/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2523FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2524/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2525FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2526
2527/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2528FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2529/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2530FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2531/* Opcode 0xf3 0x0f 0x2e - invalid */
2532/* Opcode 0xf2 0x0f 0x2e - invalid */
2533
2534/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2535FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2536/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2537FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2538/* Opcode 0xf3 0x0f 0x2f - invalid */
2539/* Opcode 0xf2 0x0f 0x2f - invalid */
2540
2541/** Opcode 0x0f 0x30. */
2542FNIEMOP_DEF(iemOp_wrmsr)
2543{
2544 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2546 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2547}
2548
2549
2550/** Opcode 0x0f 0x31. */
2551FNIEMOP_DEF(iemOp_rdtsc)
2552{
2553 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2555 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2556}
2557
2558
2559/** Opcode 0x0f 0x32. */
2560FNIEMOP_DEF(iemOp_rdmsr)
2561{
2562 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2564 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2565}
2566
2567
2568/** Opcode 0x0f 0x33. */
2569FNIEMOP_DEF(iemOp_rdpmc)
2570{
2571 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2573 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2574}
2575
2576
2577/** Opcode 0x0f 0x34. */
2578FNIEMOP_STUB(iemOp_sysenter);
2579/** Opcode 0x0f 0x35. */
2580FNIEMOP_STUB(iemOp_sysexit);
2581/** Opcode 0x0f 0x37. */
2582FNIEMOP_STUB(iemOp_getsec);
2583
2584
2585/** Opcode 0x0f 0x38. */
2586FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2587{
2588#ifdef IEM_WITH_THREE_0F_38
2589 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2590 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2591#else
2592 IEMOP_BITCH_ABOUT_STUB();
2593 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2594#endif
2595}
2596
2597
2598/** Opcode 0x0f 0x3a. */
2599FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2600{
2601#ifdef IEM_WITH_THREE_0F_3A
2602 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2603 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2604#else
2605 IEMOP_BITCH_ABOUT_STUB();
2606 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2607#endif
2608}
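
/**
 * @par Example
 * Both escape handlers above index a flat table with four entries per
 * opcode byte, one per mandatory prefix. A hedged sketch of that layout
 * (hypothetical names; the 0=none, 1=0x66, 2=0xf3, 3=0xf2 prefix encoding
 * is an assumption):
 * @code
 * #include <stdint.h>
 * typedef int (*PFNEXAMPLEOP)(void);
 * static int ExampleDispatch(PFNEXAMPLEOP const *papfnTable, uint8_t bOpcode, unsigned idxPrefix)
 * {
 *     return papfnTable[(uintptr_t)bOpcode * 4 + idxPrefix](); // 4 slots per opcode byte
 * }
 * @endcode
 */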
2609
2610
2611/**
2612 * Implements a conditional move.
2613 *
2614 * Wish there were an obvious way to do this that would let us share code
2615 * and reduce bloat.
2616 *
2617 * @param a_Cnd The conditional "microcode" operation.
2618 */
2619#define CMOV_X(a_Cnd) \
2620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2622 { \
2623 switch (pVCpu->iem.s.enmEffOpSize) \
2624 { \
2625 case IEMMODE_16BIT: \
2626 IEM_MC_BEGIN(0, 1); \
2627 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2628 a_Cnd { \
2629 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2630 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2631 } IEM_MC_ENDIF(); \
2632 IEM_MC_ADVANCE_RIP(); \
2633 IEM_MC_END(); \
2634 return VINF_SUCCESS; \
2635 \
2636 case IEMMODE_32BIT: \
2637 IEM_MC_BEGIN(0, 1); \
2638 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2639 a_Cnd { \
2640 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2641 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2642 } IEM_MC_ELSE() { \
2643 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2644 } IEM_MC_ENDIF(); \
2645 IEM_MC_ADVANCE_RIP(); \
2646 IEM_MC_END(); \
2647 return VINF_SUCCESS; \
2648 \
2649 case IEMMODE_64BIT: \
2650 IEM_MC_BEGIN(0, 1); \
2651 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2652 a_Cnd { \
2653 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2654 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2655 } IEM_MC_ENDIF(); \
2656 IEM_MC_ADVANCE_RIP(); \
2657 IEM_MC_END(); \
2658 return VINF_SUCCESS; \
2659 \
2660 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2661 } \
2662 } \
2663 else \
2664 { \
2665 switch (pVCpu->iem.s.enmEffOpSize) \
2666 { \
2667 case IEMMODE_16BIT: \
2668 IEM_MC_BEGIN(0, 2); \
2669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2670 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2672 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2673 a_Cnd { \
2674 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2675 } IEM_MC_ENDIF(); \
2676 IEM_MC_ADVANCE_RIP(); \
2677 IEM_MC_END(); \
2678 return VINF_SUCCESS; \
2679 \
2680 case IEMMODE_32BIT: \
2681 IEM_MC_BEGIN(0, 2); \
2682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2683 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2685 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2686 a_Cnd { \
2687 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2688 } IEM_MC_ELSE() { \
2689 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2690 } IEM_MC_ENDIF(); \
2691 IEM_MC_ADVANCE_RIP(); \
2692 IEM_MC_END(); \
2693 return VINF_SUCCESS; \
2694 \
2695 case IEMMODE_64BIT: \
2696 IEM_MC_BEGIN(0, 2); \
2697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2698 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2700 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2701 a_Cnd { \
2702 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2703 } IEM_MC_ENDIF(); \
2704 IEM_MC_ADVANCE_RIP(); \
2705 IEM_MC_END(); \
2706 return VINF_SUCCESS; \
2707 \
2708 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2709 } \
2710 } do {} while (0)
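
/**
 * @par Example
 * The asymmetry in CMOV_X - an IEM_MC_ELSE branch only in the 32-bit cases -
 * mirrors the architecture: with a 32-bit operand size in 64-bit mode the
 * destination's upper half is zeroed even when the condition is false, while
 * the 16-bit and 64-bit forms leave the destination untouched on a false
 * condition. A hedged sketch of the 32-bit case (hypothetical helper):
 * @code
 * #include <stdint.h>
 * #include <stdbool.h>
 * static void ExampleCmov32(uint64_t *puDst64, uint32_t uSrc, bool fCond)
 * {
 *     if (fCond)
 *         *puDst64 = uSrc;        // the write zero-extends to 64 bits
 *     else
 *         *puDst64 &= UINT32_MAX; // high dword is cleared regardless
 * }
 * @endcode
 */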
2711
2712
2713
2714/** Opcode 0x0f 0x40. */
2715FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2716{
2717 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2718 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2719}
2720
2721
2722/** Opcode 0x0f 0x41. */
2723FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2724{
2725 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2726 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2727}
2728
2729
2730/** Opcode 0x0f 0x42. */
2731FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2732{
2733 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2734 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2735}
2736
2737
2738/** Opcode 0x0f 0x43. */
2739FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2740{
2741 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2742 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2743}
2744
2745
2746/** Opcode 0x0f 0x44. */
2747FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2748{
2749 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2750 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2751}
2752
2753
2754/** Opcode 0x0f 0x45. */
2755FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2756{
2757 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2758 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2759}
2760
2761
2762/** Opcode 0x0f 0x46. */
2763FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2764{
2765 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2766 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2767}
2768
2769
2770/** Opcode 0x0f 0x47. */
2771FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2772{
2773 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2774 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2775}
2776
2777
2778/** Opcode 0x0f 0x48. */
2779FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2780{
2781 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2782 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2783}
2784
2785
2786/** Opcode 0x0f 0x49. */
2787FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2788{
2789 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2790 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2791}
2792
2793
2794/** Opcode 0x0f 0x4a. */
2795FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2796{
2797 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2798 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2799}
2800
2801
2802/** Opcode 0x0f 0x4b. */
2803FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2804{
2805 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2806 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2807}
2808
2809
2810/** Opcode 0x0f 0x4c. */
2811FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2812{
2813 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2814 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2815}
2816
2817
2818/** Opcode 0x0f 0x4d. */
2819FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2820{
2821 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2822 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2823}
2824
2825
2826/** Opcode 0x0f 0x4e. */
2827FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2828{
2829 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2830 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2831}
2832
2833
2834/** Opcode 0x0f 0x4f. */
2835FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2836{
2837 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2838 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2839}
2840
2841#undef CMOV_X
2842
2843/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2844FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2845/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2846FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2847/* Opcode 0xf3 0x0f 0x50 - invalid */
2848/* Opcode 0xf2 0x0f 0x50 - invalid */
2849
2850/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2851FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2852/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2853FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2854/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2855FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2856/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2857FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2858
2859/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2860FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2861/* Opcode 0x66 0x0f 0x52 - invalid */
2862/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2863FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2864/* Opcode 0xf2 0x0f 0x52 - invalid */
2865
2866/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2867FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2868/* Opcode 0x66 0x0f 0x53 - invalid */
2869/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2870FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2871/* Opcode 0xf2 0x0f 0x53 - invalid */
2872
2873/** Opcode 0x0f 0x54 - andps Vps, Wps */
2874FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2875/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2876FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2877/* Opcode 0xf3 0x0f 0x54 - invalid */
2878/* Opcode 0xf2 0x0f 0x54 - invalid */
2879
2880/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2881FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2882/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2883FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2884/* Opcode 0xf3 0x0f 0x55 - invalid */
2885/* Opcode 0xf2 0x0f 0x55 - invalid */
2886
2887/** Opcode 0x0f 0x56 - orps Vps, Wps */
2888FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2889/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2890FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2891/* Opcode 0xf3 0x0f 0x56 - invalid */
2892/* Opcode 0xf2 0x0f 0x56 - invalid */
2893
2894/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2895FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2896/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2897FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2898/* Opcode 0xf3 0x0f 0x57 - invalid */
2899/* Opcode 0xf2 0x0f 0x57 - invalid */
2900
2901/** Opcode 0x0f 0x58 - addps Vps, Wps */
2902FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2903/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2904FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2905/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2906FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2907/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2908FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2909
2910/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2911FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2912/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2913FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2914/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2915FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2916/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2917FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2918
2919/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2920FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2921/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2922FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2923/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2924FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2925/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2926FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2927
2928/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2929FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2930/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2931FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2932/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2933FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2934/* Opcode 0xf2 0x0f 0x5b - invalid */
2935
2936/** Opcode 0x0f 0x5c - subps Vps, Wps */
2937FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2938/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2939FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2940/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2941FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2942/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2943FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2944
2945/** Opcode 0x0f 0x5d - minps Vps, Wps */
2946FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2947/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2948FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2949/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2950FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2951/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2952FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2953
2954/** Opcode 0x0f 0x5e - divps Vps, Wps */
2955FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2958/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2959FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2960/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2961FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2962
2963/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2964FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2965/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2966FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2967/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2968FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2969/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2970FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2971
2972/**
2973 * Common worker for SSE2 instructions on the forms:
2974 *      pxxxx    xmm1, xmm2/mem128
2975 *
2976 * The 2nd operand is the first half of a register, which in the memory case
2977 * means a 128-bit aligned access where only the lower 64 bits actually need
2978 * to be read, though the CPU may fetch all 128 bits.
2979 *
2980 * Exceptions type 4.
2981 */
2982FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2983{
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2986 {
2987 /*
2988 * Register, register.
2989 */
2990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2991 IEM_MC_BEGIN(2, 0);
2992 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2993 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2995 IEM_MC_PREPARE_SSE_USAGE();
2996 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2997 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2998 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2999 IEM_MC_ADVANCE_RIP();
3000 IEM_MC_END();
3001 }
3002 else
3003 {
3004 /*
3005 * Register, memory.
3006 */
3007 IEM_MC_BEGIN(2, 2);
3008 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3009 IEM_MC_LOCAL(uint64_t, uSrc);
3010 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3012
3013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3015 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3016 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3017
3018 IEM_MC_PREPARE_SSE_USAGE();
3019 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3020 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3021
3022 IEM_MC_ADVANCE_RIP();
3023 IEM_MC_END();
3024 }
3025 return VINF_SUCCESS;
3026}
3027
3028
3029/**
3030 * Common worker for MMX instructions on the forms:
3031 *      pxxxx    mm1, mm2/mem32
3032 *
3033 * The 2nd operand is the first half of a register, which in the memory case
3034 * means a 32-bit memory access (only the low half of the source register is
3035 * used).
3036 *
3037 * Exceptions type 4.
3038 */
3039FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3040{
3041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3042 if (!pImpl->pfnU64)
3043 return IEMOP_RAISE_INVALID_OPCODE();
3044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3045 {
3046 /*
3047 * Register, register.
3048 */
3049 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3050 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3052 IEM_MC_BEGIN(2, 0);
3053 IEM_MC_ARG(uint64_t *, pDst, 0);
3054 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3055 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3056 IEM_MC_PREPARE_FPU_USAGE();
3057 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3058 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3059 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3060 IEM_MC_ADVANCE_RIP();
3061 IEM_MC_END();
3062 }
3063 else
3064 {
3065 /*
3066 * Register, memory.
3067 */
3068 IEM_MC_BEGIN(2, 2);
3069 IEM_MC_ARG(uint64_t *, pDst, 0);
3070 IEM_MC_LOCAL(uint32_t, uSrc);
3071 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3073
3074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3077 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3078
3079 IEM_MC_PREPARE_FPU_USAGE();
3080 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3081 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3082
3083 IEM_MC_ADVANCE_RIP();
3084 IEM_MC_END();
3085 }
3086 return VINF_SUCCESS;
3087}
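
/**
 * @par Example
 * The pfnU64/pfnU128 helpers these workers call interleave the low halves
 * of the two operands. For the 64-bit (MMX) punpcklbw case the effect is
 * (hedged sketch, hypothetical helper):
 * @code
 * #include <stdint.h>
 * static uint64_t ExamplePunpcklbw(uint64_t uDst, uint64_t uSrc)
 * {
 *     uint64_t uResult = 0;
 *     for (unsigned i = 0; i < 4; i++) // the four low bytes of each operand
 *     {
 *         uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     // even result bytes from dst
 *         uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); // odd result bytes from src
 *     }
 *     return uResult;
 * }
 * @endcode
 */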
3088
3089
3090/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3091FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3092{
3093 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3094 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3095}
3096
3097/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3098FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3099{
3100    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3101 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3102}
3103
3104/* Opcode 0xf3 0x0f 0x60 - invalid */
3105
3106
3107/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3108FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3109{
3110    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3111 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3112}
3113
3114/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3115FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3116{
3117    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3118 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3119}
3120
3121/* Opcode 0xf3 0x0f 0x61 - invalid */
3122
3123
3124/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3125FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3126{
3127 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3128 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3129}
3130
3131/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3132FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3133{
3134 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3135 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3136}
3137
3138/* Opcode 0xf3 0x0f 0x62 - invalid */
3139
3140
3141
3142/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3143FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3144/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3145FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3146/* Opcode 0xf3 0x0f 0x63 - invalid */
3147
3148/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3149FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3150/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3151FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3152/* Opcode 0xf3 0x0f 0x64 - invalid */
3153
3154/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3155FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3156/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3157FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3158/* Opcode 0xf3 0x0f 0x65 - invalid */
3159
3160/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3161FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3162/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3163FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3164/* Opcode 0xf3 0x0f 0x66 - invalid */
3165
3166/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3167FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3168/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3169FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3170/* Opcode 0xf3 0x0f 0x67 - invalid */
3171
3172
3173/**
3174 * Common worker for MMX instructions on the form:
3175 * pxxxx mm1, mm2/mem64
3176 *
3177 * The 2nd operand is the second half of a register, which in the memory
3178 * case means a 64-bit memory access (only the high half of the source
3179 * register is used).
3180 *
3181 * Exceptions type 4.
3182 */
3183FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3184{
3185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3186 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3188 {
3189 /*
3190 * Register, register.
3191 */
3192 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3193 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_BEGIN(2, 0);
3196 IEM_MC_ARG(uint64_t *, pDst, 0);
3197 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3198 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3199 IEM_MC_PREPARE_FPU_USAGE();
3200 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3201 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3202 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3203 IEM_MC_ADVANCE_RIP();
3204 IEM_MC_END();
3205 }
3206 else
3207 {
3208 /*
3209 * Register, memory.
3210 */
3211 IEM_MC_BEGIN(2, 2);
3212 IEM_MC_ARG(uint64_t *, pDst, 0);
3213 IEM_MC_LOCAL(uint64_t, uSrc);
3214 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3216
3217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3221
3222 IEM_MC_PREPARE_FPU_USAGE();
3223 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3224 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3225
3226 IEM_MC_ADVANCE_RIP();
3227 IEM_MC_END();
3228 }
3229 return VINF_SUCCESS;
3230}
3231
3232
3233/**
3234 * Common worker for SSE2 instructions on the form:
3235 * pxxxx xmm1, xmm2/mem128
3236 *
3237 * The 2nd operand is the second half of a register, which in the memory case
3238 * means a 128-bit aligned access; the CPU may read the full 128 bits or only
3239 * the upper 64 bits.
3240 *
3241 * Exceptions type 4.
3242 */
3243FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3244{
3245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3246 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3247 {
3248 /*
3249 * Register, register.
3250 */
3251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3252 IEM_MC_BEGIN(2, 0);
3253 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3254 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3255 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3256 IEM_MC_PREPARE_SSE_USAGE();
3257 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3258 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3259 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3260 IEM_MC_ADVANCE_RIP();
3261 IEM_MC_END();
3262 }
3263 else
3264 {
3265 /*
3266 * Register, memory.
3267 */
3268 IEM_MC_BEGIN(2, 2);
3269 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3270 IEM_MC_LOCAL(RTUINT128U, uSrc);
3271 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3273
3274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3277        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3278
3279 IEM_MC_PREPARE_SSE_USAGE();
3280 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3281 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3282
3283 IEM_MC_ADVANCE_RIP();
3284 IEM_MC_END();
3285 }
3286 return VINF_SUCCESS;
3287}
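
/**
 * @par Example
 * The high-half workers interleave the upper halves instead. For the 64-bit
 * (MMX) punpckhwd case (hedged sketch, hypothetical helper):
 * @code
 * #include <stdint.h>
 * static uint64_t ExamplePunpckhwd(uint64_t uDst, uint64_t uSrc)
 * {
 *     return  (uint64_t)(uint16_t)(uDst >> 32)          // word 0 <- dst word 2
 *          | ((uint64_t)(uint16_t)(uSrc >> 32) << 16)   // word 1 <- src word 2
 *          | ((uint64_t)(uint16_t)(uDst >> 48) << 32)   // word 2 <- dst word 3
 *          | ((uint64_t)(uint16_t)(uSrc >> 48) << 48);  // word 3 <- src word 3
 * }
 * @endcode
 */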
3288
3289
3290/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3291FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3292{
3293 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3294 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3295}
3296
3297/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3298FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3299{
3300    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3301 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3302}
3303/* Opcode 0xf3 0x0f 0x68 - invalid */
3304
3305
3306/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3307FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3308{
3309 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3310 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3311}
3312
3313/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3314FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3315{
3316 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3317 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3319}
3320/* Opcode 0xf3 0x0f 0x69 - invalid */
3321
3322
3323/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3324FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3325{
3326 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3327 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3328}
3329
3330/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3331FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3332{
3333 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3334 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3335}
3336/* Opcode 0xf3 0x0f 0x6a - invalid */
3337
3338
3339/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3340FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3341/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3342FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3343/* Opcode 0xf3 0x0f 0x6b - invalid */
3344
3345
3346/* Opcode 0x0f 0x6c - invalid */
3347
3348/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3349FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3350{
3351 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3352 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3353}
3354
3355/* Opcode 0xf3 0x0f 0x6c - invalid */
3356/* Opcode 0xf2 0x0f 0x6c - invalid */
3357
3358
3359/* Opcode 0x0f 0x6d - invalid */
3360
3361/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3362FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3363{
3364    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
3365 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3366}
3367
3368/* Opcode 0xf3 0x0f 0x6d - invalid */
3369
3370
3371FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3372{
3373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3374 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3375 {
3376 /**
3377 * @opcode 0x6e
3378 * @opcodesub rex.w=1
3379 * @oppfx none
3380 * @opcpuid mmx
3381 * @opgroup og_mmx_datamove
3382 * @opxcpttype 5
3383 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3384 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3385 */
3386 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3387 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3388 {
3389 /* MMX, greg64 */
3390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3391 IEM_MC_BEGIN(0, 1);
3392 IEM_MC_LOCAL(uint64_t, u64Tmp);
3393
3394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3395 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3396
3397 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3398 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3399 IEM_MC_FPU_TO_MMX_MODE();
3400
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 else
3405 {
3406 /* MMX, [mem64] */
3407 IEM_MC_BEGIN(0, 2);
3408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3409 IEM_MC_LOCAL(uint64_t, u64Tmp);
3410
3411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3413 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3414 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3415
3416 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3417 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3418 IEM_MC_FPU_TO_MMX_MODE();
3419
3420 IEM_MC_ADVANCE_RIP();
3421 IEM_MC_END();
3422 }
3423 }
3424 else
3425 {
3426 /**
3427 * @opdone
3428 * @opcode 0x6e
3429 * @opcodesub rex.w=0
3430 * @oppfx none
3431 * @opcpuid mmx
3432 * @opgroup og_mmx_datamove
3433 * @opxcpttype 5
3434 * @opfunction iemOp_movd_q_Pd_Ey
3435 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3436 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3437 */
3438 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3439 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3440 {
3441 /* MMX, greg */
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3443 IEM_MC_BEGIN(0, 1);
3444 IEM_MC_LOCAL(uint64_t, u64Tmp);
3445
3446 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3448
3449 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3450 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3451 IEM_MC_FPU_TO_MMX_MODE();
3452
3453 IEM_MC_ADVANCE_RIP();
3454 IEM_MC_END();
3455 }
3456 else
3457 {
3458 /* MMX, [mem] */
3459 IEM_MC_BEGIN(0, 2);
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461 IEM_MC_LOCAL(uint32_t, u32Tmp);
3462
3463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3465 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3467
3468 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3469 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3470 IEM_MC_FPU_TO_MMX_MODE();
3471
3472 IEM_MC_ADVANCE_RIP();
3473 IEM_MC_END();
3474 }
3475 }
3476 return VINF_SUCCESS;
3477}
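
/**
 * @par Example
 * The rex.w=0 path above zero-extends the 32-bit source into the 64-bit MMX
 * register, and IEM_MC_FPU_TO_MMX_MODE is why the tests expect ftw=0xff: an
 * MMX write sets the abridged x87 tag word to all-valid. A hedged sketch of
 * the data movement only (hypothetical helper):
 * @code
 * #include <stdint.h>
 * static uint64_t ExampleMovdToMmx(uint32_t uSrc)
 * {
 *     return (uint64_t)uSrc; // bits 63:32 of the MMX register become zero
 * }
 * @endcode
 */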
3478
3479FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3480{
3481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3482 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3483 {
3484 /**
3485 * @opcode 0x6e
3486 * @opcodesub rex.w=1
3487 * @oppfx 0x66
3488 * @opcpuid sse2
3489 * @opgroup og_sse2_simdint_datamove
3490 * @opxcpttype 5
3491 * @optest 64-bit / op1=1 op2=2 -> op1=2
3492 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3493 */
3494 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3495 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3496 {
3497 /* XMM, greg64 */
3498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3499 IEM_MC_BEGIN(0, 1);
3500 IEM_MC_LOCAL(uint64_t, u64Tmp);
3501
3502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3503 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3504
3505 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3506 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3507
3508 IEM_MC_ADVANCE_RIP();
3509 IEM_MC_END();
3510 }
3511 else
3512 {
3513 /* XMM, [mem64] */
3514 IEM_MC_BEGIN(0, 2);
3515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3516 IEM_MC_LOCAL(uint64_t, u64Tmp);
3517
3518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3522
3523 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3524 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3525
3526 IEM_MC_ADVANCE_RIP();
3527 IEM_MC_END();
3528 }
3529 }
3530 else
3531 {
3532 /**
3533 * @opdone
3534 * @opcode 0x6e
3535 * @opcodesub rex.w=0
3536 * @oppfx 0x66
3537 * @opcpuid sse2
3538 * @opgroup og_sse2_simdint_datamove
3539 * @opxcpttype 5
3540 * @opfunction iemOp_movd_q_Vy_Ey
3541 * @optest op1=1 op2=2 -> op1=2
3542 * @optest op1=0 op2=-42 -> op1=-42
3543 */
3544 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3546 {
3547 /* XMM, greg32 */
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3549 IEM_MC_BEGIN(0, 1);
3550 IEM_MC_LOCAL(uint32_t, u32Tmp);
3551
3552 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3554
3555 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3556 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3557
3558 IEM_MC_ADVANCE_RIP();
3559 IEM_MC_END();
3560 }
3561 else
3562 {
3563 /* XMM, [mem32] */
3564 IEM_MC_BEGIN(0, 2);
3565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3566 IEM_MC_LOCAL(uint32_t, u32Tmp);
3567
3568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3572
3573 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3574 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3575
3576 IEM_MC_ADVANCE_RIP();
3577 IEM_MC_END();
3578 }
3579 }
3580 return VINF_SUCCESS;
3581}
3582
3583/* Opcode 0xf3 0x0f 0x6e - invalid */
3584
3585
3586/**
3587 * @opcode 0x6f
3588 * @oppfx none
3589 * @opcpuid mmx
3590 * @opgroup og_mmx_datamove
3591 * @opxcpttype 5
3592 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3593 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3594 */
3595FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3596{
3597    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3600 {
3601 /*
3602 * Register, register.
3603 */
3604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3605 IEM_MC_BEGIN(0, 1);
3606 IEM_MC_LOCAL(uint64_t, u64Tmp);
3607
3608 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3609 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3610
3611 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3612 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3613 IEM_MC_FPU_TO_MMX_MODE();
3614
3615 IEM_MC_ADVANCE_RIP();
3616 IEM_MC_END();
3617 }
3618 else
3619 {
3620 /*
3621 * Register, memory.
3622 */
3623 IEM_MC_BEGIN(0, 2);
3624 IEM_MC_LOCAL(uint64_t, u64Tmp);
3625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3626
3627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3630 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3631
3632 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3633 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3634 IEM_MC_FPU_TO_MMX_MODE();
3635
3636 IEM_MC_ADVANCE_RIP();
3637 IEM_MC_END();
3638 }
3639 return VINF_SUCCESS;
3640}
3641
3642/**
3643 * @opcode 0x6f
3644 * @oppfx 0x66
3645 * @opcpuid sse2
3646 * @opgroup og_sse2_simdint_datamove
3647 * @opxcpttype 1
3648 * @optest op1=1 op2=2 -> op1=2
3649 * @optest op1=0 op2=-42 -> op1=-42
3650 */
3651FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3652{
3653 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3656 {
3657 /*
3658 * Register, register.
3659 */
3660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3661 IEM_MC_BEGIN(0, 0);
3662
3663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3665
3666 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3667 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3668 IEM_MC_ADVANCE_RIP();
3669 IEM_MC_END();
3670 }
3671 else
3672 {
3673 /*
3674 * Register, memory.
3675 */
3676 IEM_MC_BEGIN(0, 2);
3677 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3679
3680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3682 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3683 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3684
3685 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3686 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3687
3688 IEM_MC_ADVANCE_RIP();
3689 IEM_MC_END();
3690 }
3691 return VINF_SUCCESS;
3692}
3693
3694/**
3695 * @opcode 0x6f
3696 * @oppfx 0xf3
3697 * @opcpuid sse2
3698 * @opgroup og_sse2_simdint_datamove
3699 * @opxcpttype 4UA
3700 * @optest op1=1 op2=2 -> op1=2
3701 * @optest op1=0 op2=-42 -> op1=-42
3702 */
3703FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3704{
3705 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3708 {
3709 /*
3710 * Register, register.
3711 */
3712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3713 IEM_MC_BEGIN(0, 0);
3714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3716 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3717 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 }
3721 else
3722 {
3723 /*
3724 * Register, memory.
3725 */
3726 IEM_MC_BEGIN(0, 2);
3727 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3729
3730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3732 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3733 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3734 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3735 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3736
3737 IEM_MC_ADVANCE_RIP();
3738 IEM_MC_END();
3739 }
3740 return VINF_SUCCESS;
3741}
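
/*
 * Alignment note on the movdqa/movdqu pair above: the movdqa path fetches the
 * operand via IEM_MC_FETCH_MEM_U128_ALIGN_SSE, which raises #GP(0) when the
 * effective address is not 16-byte aligned, while the movdqu path uses the
 * plain unaligned fetch.  Illustrative guest code:
 *
 *      movdqa xmm0, [rsp+8]    ; #GP(0) unless rsp+8 is 16-byte aligned
 *      movdqu xmm0, [rsp+8]    ; allowed at any alignment
 */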
3742
3743
3744/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3745FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3746{
3747 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3750 {
3751 /*
3752 * Register, register.
3753 */
3754 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756
3757 IEM_MC_BEGIN(3, 0);
3758 IEM_MC_ARG(uint64_t *, pDst, 0);
3759 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3760 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3761 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3762 IEM_MC_PREPARE_FPU_USAGE();
3763 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3764 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3765 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3766 IEM_MC_ADVANCE_RIP();
3767 IEM_MC_END();
3768 }
3769 else
3770 {
3771 /*
3772 * Register, memory.
3773 */
3774 IEM_MC_BEGIN(3, 2);
3775 IEM_MC_ARG(uint64_t *, pDst, 0);
3776 IEM_MC_LOCAL(uint64_t, uSrc);
3777 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3779
3780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3781 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3782 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3784 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3785
3786 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3787 IEM_MC_PREPARE_FPU_USAGE();
3788 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3789 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3790
3791 IEM_MC_ADVANCE_RIP();
3792 IEM_MC_END();
3793 }
3794 return VINF_SUCCESS;
3795}
3796
3797/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3798FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3799{
3800 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3803 {
3804 /*
3805 * Register, register.
3806 */
3807 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809
3810 IEM_MC_BEGIN(3, 0);
3811 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3812 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3813 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3815 IEM_MC_PREPARE_SSE_USAGE();
3816 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3817 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3818 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3819 IEM_MC_ADVANCE_RIP();
3820 IEM_MC_END();
3821 }
3822 else
3823 {
3824 /*
3825 * Register, memory.
3826 */
3827 IEM_MC_BEGIN(3, 2);
3828 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3829 IEM_MC_LOCAL(RTUINT128U, uSrc);
3830 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3832
3833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3834 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3835 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3837 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3838
3839 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3840 IEM_MC_PREPARE_SSE_USAGE();
3841 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3842 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3843
3844 IEM_MC_ADVANCE_RIP();
3845 IEM_MC_END();
3846 }
3847 return VINF_SUCCESS;
3848}
3849
3850/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3851FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3852{
3853 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3856 {
3857 /*
3858 * Register, register.
3859 */
3860 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862
3863 IEM_MC_BEGIN(3, 0);
3864 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3865 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3866 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3867 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3868 IEM_MC_PREPARE_SSE_USAGE();
3869 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3870 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3871 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3872 IEM_MC_ADVANCE_RIP();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 /*
3878 * Register, memory.
3879 */
3880 IEM_MC_BEGIN(3, 2);
3881 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3882 IEM_MC_LOCAL(RTUINT128U, uSrc);
3883 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3885
3886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3887 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3888 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3891
3892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3893 IEM_MC_PREPARE_SSE_USAGE();
3894 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3895 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3896
3897 IEM_MC_ADVANCE_RIP();
3898 IEM_MC_END();
3899 }
3900 return VINF_SUCCESS;
3901}
3902
3903/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3904FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3905{
3906 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3909 {
3910 /*
3911 * Register, register.
3912 */
3913 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915
3916 IEM_MC_BEGIN(3, 0);
3917 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3918 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3919 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3920 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3921 IEM_MC_PREPARE_SSE_USAGE();
3922 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3923 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3924 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3925 IEM_MC_ADVANCE_RIP();
3926 IEM_MC_END();
3927 }
3928 else
3929 {
3930 /*
3931 * Register, memory.
3932 */
3933 IEM_MC_BEGIN(3, 2);
3934 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3935 IEM_MC_LOCAL(RTUINT128U, uSrc);
3936 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3938
3939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3940 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3941 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3943 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3944
3945 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3946 IEM_MC_PREPARE_SSE_USAGE();
3947 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3948 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3949
3950 IEM_MC_ADVANCE_RIP();
3951 IEM_MC_END();
3952 }
3953 return VINF_SUCCESS;
3954}
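
/*
 * Worked example of the Ib selector used by the pshufw/pshufd/pshufhw/pshuflw
 * decoders above (illustrative only): each 2-bit field of the immediate picks
 * the source element for the corresponding destination element, so 0x1b
 * (binary 00 01 10 11) reverses the four elements.  For pshufhw/pshuflw this
 * applies to the high/low quadword only; the other half is copied unchanged.
 *
 *      uint16_t const auSrc[4] = { 0x1111, 0x2222, 0x3333, 0x4444 };
 *      uint16_t       auDst[4];
 *      uint8_t  const bImm     = 0x1b;
 *      for (unsigned i = 0; i < 4; i++)
 *          auDst[i] = auSrc[(bImm >> (i * 2)) & 3];
 *      // auDst = { 0x4444, 0x3333, 0x2222, 0x1111 }
 */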
3955
3956
3957/** Opcode 0x0f 0x71 11/2. */
3958FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3959
3960/** Opcode 0x66 0x0f 0x71 11/2. */
3961FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3962
3963/** Opcode 0x0f 0x71 11/4. */
3964FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3965
3966/** Opcode 0x66 0x0f 0x71 11/4. */
3967FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3968
3969/** Opcode 0x0f 0x71 11/6. */
3970FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3971
3972/** Opcode 0x66 0x0f 0x71 11/6. */
3973FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3974
3975
3976/**
3977 * Group 12 jump table for register variant.
3978 */
3979IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3980{
3981 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3982 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3983 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3984 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3985 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3986 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3987 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3988 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3989};
3990AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3991
3992
3993/** Opcode 0x0f 0x71. */
3994FNIEMOP_DEF(iemOp_Grp12)
3995{
3996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3998 /* register, register */
3999 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4000 + pVCpu->iem.s.idxPrefix], bRm);
4001 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4002}
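
/*
 * Illustrative note on the dispatch above: the table has four prefix columns
 * per /r row, so the index is reg * 4 + idxPrefix, assuming the usual IEM
 * column order (0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2).  E.g. for
 * 66 0F 71 /2 ib (psrlw Ux,Ib):
 *
 *      unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // = 2
 *      unsigned const idx  = iReg * 4 + 1;  // = 9 -> iemOp_Grp12_psrlw_Ux_Ib
 */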
4003
4004
4005/** Opcode 0x0f 0x72 11/2. */
4006FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4007
4008/** Opcode 0x66 0x0f 0x72 11/2. */
4009FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4010
4011/** Opcode 0x0f 0x72 11/4. */
4012FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4013
4014/** Opcode 0x66 0x0f 0x72 11/4. */
4015FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4016
4017/** Opcode 0x0f 0x72 11/6. */
4018FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4019
4020/** Opcode 0x66 0x0f 0x72 11/6. */
4021FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4022
4023
4024/**
4025 * Group 13 jump table for register variant.
4026 */
4027IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4028{
4029 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4030 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4031 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4032 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4033 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4034 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4035 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4036 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4037};
4038AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4039
4040/** Opcode 0x0f 0x72. */
4041FNIEMOP_DEF(iemOp_Grp13)
4042{
4043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4045 /* register, register */
4046 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4047 + pVCpu->iem.s.idxPrefix], bRm);
4048 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4049}
4050
4051
4052/** Opcode 0x0f 0x73 11/2. */
4053FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4054
4055/** Opcode 0x66 0x0f 0x73 11/2. */
4056FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4057
4058/** Opcode 0x66 0x0f 0x73 11/3. */
4059FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4060
4061/** Opcode 0x0f 0x73 11/6. */
4062FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4063
4064/** Opcode 0x66 0x0f 0x73 11/6. */
4065FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4066
4067/** Opcode 0x66 0x0f 0x73 11/7. */
4068FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4069
4070/**
4071 * Group 14 jump table for register variant.
4072 */
4073IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4074{
4075 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4076 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4077 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4078 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4079 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4080 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4081 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4082 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4083};
4084AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4085
4086
4087/** Opcode 0x0f 0x73. */
4088FNIEMOP_DEF(iemOp_Grp14)
4089{
4090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4092 /* register, register */
4093 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4094 + pVCpu->iem.s.idxPrefix], bRm);
4095 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4096}
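
/*
 * Note on the /3 and /7 rows in the group 14 table above: psrldq and pslldq
 * exist only with the 0x66 prefix and shift the whole xmm register by Ib
 * bytes.  A standalone sketch of what the (still stubbed) psrldq worker will
 * have to do; names and layout are illustrative:
 *
 *      static void PSrlDqSketch(RTUINT128U *puDst, uint8_t cShift)
 *      {
 *          if (cShift > 15)
 *              puDst->s.Hi = puDst->s.Lo = 0;
 *          else
 *          {
 *              unsigned const cBits = cShift * 8;
 *              uint64_t const uHi   = puDst->s.Hi;
 *              puDst->s.Lo = cBits < 64
 *                          ? (puDst->s.Lo >> cBits) | (cBits ? uHi << (64 - cBits) : 0)
 *                          : uHi >> (cBits - 64);
 *              puDst->s.Hi = cBits < 64 ? uHi >> cBits : 0;
 *          }
 *      }
 */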
4097
4098
4099/**
4100 * Common worker for MMX instructions on the form:
4101 * pxxx mm1, mm2/mem64
4102 */
4103FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4104{
4105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4107 {
4108 /*
4109 * Register, register.
4110 */
4111 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4112 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4114 IEM_MC_BEGIN(2, 0);
4115 IEM_MC_ARG(uint64_t *, pDst, 0);
4116 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4117 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4118 IEM_MC_PREPARE_FPU_USAGE();
4119 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4120 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4121 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4122 IEM_MC_ADVANCE_RIP();
4123 IEM_MC_END();
4124 }
4125 else
4126 {
4127 /*
4128 * Register, memory.
4129 */
4130 IEM_MC_BEGIN(2, 2);
4131 IEM_MC_ARG(uint64_t *, pDst, 0);
4132 IEM_MC_LOCAL(uint64_t, uSrc);
4133 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135
4136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4139 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4140
4141 IEM_MC_PREPARE_FPU_USAGE();
4142 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4143 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4144
4145 IEM_MC_ADVANCE_RIP();
4146 IEM_MC_END();
4147 }
4148 return VINF_SUCCESS;
4149}
4150
4151
4152/**
4153 * Common worker for SSE2 instructions on the forms:
4154 * pxxx xmm1, xmm2/mem128
4155 *
4156 * Proper alignment of the 128-bit operand is enforced.
4157 * Exceptions type 4. SSE2 cpuid checks.
4158 */
4159FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4160{
4161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4163 {
4164 /*
4165 * Register, register.
4166 */
4167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4168 IEM_MC_BEGIN(2, 0);
4169 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4170 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4172 IEM_MC_PREPARE_SSE_USAGE();
4173 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4174 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4175 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4176 IEM_MC_ADVANCE_RIP();
4177 IEM_MC_END();
4178 }
4179 else
4180 {
4181 /*
4182 * Register, memory.
4183 */
4184 IEM_MC_BEGIN(2, 2);
4185 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4186 IEM_MC_LOCAL(RTUINT128U, uSrc);
4187 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4189
4190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4193 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4194
4195 IEM_MC_PREPARE_SSE_USAGE();
4196 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4197 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4198
4199 IEM_MC_ADVANCE_RIP();
4200 IEM_MC_END();
4201 }
4202 return VINF_SUCCESS;
4203}
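
/*
 * For reference, the kind of work the pfnU128 callback does for the pcmpeq*
 * wrappers below; a standalone sketch, not the actual iemAImpl_* code (the
 * first operand is both source and destination):
 *
 *      static void PCmpEqBSketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
 *      {
 *          for (unsigned i = 0; i < RT_ELEMENTS(puDst->au8); i++)
 *              puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
 *      }
 */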
4204
4205
4206/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4207FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4208{
4209 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4210 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4211}
4212
4213/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4214FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4215{
4216 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4217 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4218}
4219
4220/* Opcode 0xf3 0x0f 0x74 - invalid */
4221/* Opcode 0xf2 0x0f 0x74 - invalid */
4222
4223
4224/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4225FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4226{
4227 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4228 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4229}
4230
4231/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4232FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4233{
4234 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4235 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4236}
4237
4238/* Opcode 0xf3 0x0f 0x75 - invalid */
4239/* Opcode 0xf2 0x0f 0x75 - invalid */
4240
4241
4242/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4243FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4244{
4245 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4246 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4247}
4248
4249/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4250FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4251{
4252 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4253 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4254}
4255
4256/* Opcode 0xf3 0x0f 0x76 - invalid */
4257/* Opcode 0xf2 0x0f 0x76 - invalid */
4258
4259
4260/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4261FNIEMOP_DEF(iemOp_emms)
4262{
4263 IEMOP_MNEMONIC(emms, "emms");
4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4265
4266 IEM_MC_BEGIN(0,0);
4267 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4268 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4269 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4270 IEM_MC_FPU_FROM_MMX_MODE();
4271 IEM_MC_ADVANCE_RIP();
4272 IEM_MC_END();
4273 return VINF_SUCCESS;
4274}
4275
4276/* Opcode 0x66 0x0f 0x77 - invalid */
4277/* Opcode 0xf3 0x0f 0x77 - invalid */
4278/* Opcode 0xf2 0x0f 0x77 - invalid */
4279
4280/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4281FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4282/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4283FNIEMOP_STUB(iemOp_AmdGrp17);
4284/* Opcode 0xf3 0x0f 0x78 - invalid */
4285/* Opcode 0xf2 0x0f 0x78 - invalid */
4286
4287/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4288#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4289FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4290{
4291 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4292 IEMOP_HLP_IN_VMX_OPERATION();
4293 IEMOP_HLP_VMX_INSTR();
4294 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4295
4296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4298 {
4299 /*
4300 * Register, register.
4301 */
4302 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4303 if (enmEffOpSize == IEMMODE_64BIT)
4304 {
4305 IEM_MC_BEGIN(2, 0);
4306 IEM_MC_ARG(uint64_t, u64Val, 0);
4307 IEM_MC_ARG(uint64_t, u64Enc, 1);
4308 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4309 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4310 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4311 IEM_MC_END();
4312 }
4313 else
4314 {
4315 IEM_MC_BEGIN(2, 0);
4316 IEM_MC_ARG(uint32_t, u32Val, 0);
4317 IEM_MC_ARG(uint32_t, u32Enc, 1);
4318 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4319 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4320 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4321 IEM_MC_END();
4322 }
4323 }
4324 else
4325 {
4326 /*
4327 * Register, memory.
4328 */
4329 if (enmEffOpSize == IEMMODE_64BIT)
4330 {
4331 IEM_MC_BEGIN(4, 0);
4332 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4333 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4334 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4335 IEM_MC_ARG(uint64_t, u64Enc, 3);
4336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4337 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4338 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4339 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4340 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4341 IEM_MC_END();
4342 }
4343 else
4344 {
4345 IEM_MC_BEGIN(4, 0);
4346 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4347 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4348 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4349 IEM_MC_ARG(uint32_t, u32Enc, 3);
4350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4351 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4352 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4353 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4354 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4355 IEM_MC_END();
4356 }
4357 }
4358 return VINF_SUCCESS;
4359}
4360#else
4361FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4362#endif
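
/*
 * Usage sketch for the vmwrite decoding above: the VMCS field encoding comes
 * from the register operand (Gy) and the value from r/m (Ey), and the
 * instruction is only valid while in VMX operation:
 *
 *      vmwrite rax, rbx        ; VMCS field selected by rax := rbx
 *      vmwrite rax, [rsp]      ; same, with the value read from memory
 */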
4363/* Opcode 0x66 0x0f 0x79 - invalid */
4364/* Opcode 0xf3 0x0f 0x79 - invalid */
4365/* Opcode 0xf2 0x0f 0x79 - invalid */
4366
4367/* Opcode 0x0f 0x7a - invalid */
4368/* Opcode 0x66 0x0f 0x7a - invalid */
4369/* Opcode 0xf3 0x0f 0x7a - invalid */
4370/* Opcode 0xf2 0x0f 0x7a - invalid */
4371
4372/* Opcode 0x0f 0x7b - invalid */
4373/* Opcode 0x66 0x0f 0x7b - invalid */
4374/* Opcode 0xf3 0x0f 0x7b - invalid */
4375/* Opcode 0xf2 0x0f 0x7b - invalid */
4376
4377/* Opcode 0x0f 0x7c - invalid */
4378/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4379FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4380/* Opcode 0xf3 0x0f 0x7c - invalid */
4381/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4382FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4383
4384/* Opcode 0x0f 0x7d - invalid */
4385/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4386FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4387/* Opcode 0xf3 0x0f 0x7d - invalid */
4388/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4389FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4390
4391
4392/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4393FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4394{
4395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4396 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4397 {
4398 /**
4399 * @opcode 0x7e
4400 * @opcodesub rex.w=1
4401 * @oppfx none
4402 * @opcpuid mmx
4403 * @opgroup og_mmx_datamove
4404 * @opxcpttype 5
4405 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4406 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4407 */
4408 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4409 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4410 {
4411 /* greg64, MMX */
4412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4413 IEM_MC_BEGIN(0, 1);
4414 IEM_MC_LOCAL(uint64_t, u64Tmp);
4415
4416 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4417 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4418
4419 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4420 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4421 IEM_MC_FPU_TO_MMX_MODE();
4422
4423 IEM_MC_ADVANCE_RIP();
4424 IEM_MC_END();
4425 }
4426 else
4427 {
4428 /* [mem64], MMX */
4429 IEM_MC_BEGIN(0, 2);
4430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4431 IEM_MC_LOCAL(uint64_t, u64Tmp);
4432
4433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4435 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4436 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4437
4438 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4439 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4440 IEM_MC_FPU_TO_MMX_MODE();
4441
4442 IEM_MC_ADVANCE_RIP();
4443 IEM_MC_END();
4444 }
4445 }
4446 else
4447 {
4448 /**
4449 * @opdone
4450 * @opcode 0x7e
4451 * @opcodesub rex.w=0
4452 * @oppfx none
4453 * @opcpuid mmx
4454 * @opgroup og_mmx_datamove
4455 * @opxcpttype 5
4456 * @opfunction iemOp_movd_q_Ey_Pd
4457 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4458 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4459 */
4460 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4462 {
4463 /* greg32, MMX */
4464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4465 IEM_MC_BEGIN(0, 1);
4466 IEM_MC_LOCAL(uint32_t, u32Tmp);
4467
4468 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4469 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4470
4471 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4472 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4473 IEM_MC_FPU_TO_MMX_MODE();
4474
4475 IEM_MC_ADVANCE_RIP();
4476 IEM_MC_END();
4477 }
4478 else
4479 {
4480 /* [mem32], MMX */
4481 IEM_MC_BEGIN(0, 2);
4482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4483 IEM_MC_LOCAL(uint32_t, u32Tmp);
4484
4485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4487 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4488 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4489
4490 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4491 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4492 IEM_MC_FPU_TO_MMX_MODE();
4493
4494 IEM_MC_ADVANCE_RIP();
4495 IEM_MC_END();
4496 }
4497 }
4498 return VINF_SUCCESS;
4500}
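
/*
 * Encoding sketch for the REX.W split above (mod=3, reg=0 selects mm0 and
 * rm=0 selects eax/rax):
 *
 *      0F 7E C0        movd eax, mm0   ; rex.w=0: 32-bit store
 *      48 0F 7E C0     movq rax, mm0   ; rex.w=1: 64-bit store
 */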
4501
4502
4503FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4504{
4505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4506 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4507 {
4508 /**
4509 * @opcode 0x7e
4510 * @opcodesub rex.w=1
4511 * @oppfx 0x66
4512 * @opcpuid sse2
4513 * @opgroup og_sse2_simdint_datamove
4514 * @opxcpttype 5
4515 * @optest 64-bit / op1=1 op2=2 -> op1=2
4516 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4517 */
4518 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4520 {
4521 /* greg64, XMM */
4522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4523 IEM_MC_BEGIN(0, 1);
4524 IEM_MC_LOCAL(uint64_t, u64Tmp);
4525
4526 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4528
4529 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4530 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4531
4532 IEM_MC_ADVANCE_RIP();
4533 IEM_MC_END();
4534 }
4535 else
4536 {
4537 /* [mem64], XMM */
4538 IEM_MC_BEGIN(0, 2);
4539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4540 IEM_MC_LOCAL(uint64_t, u64Tmp);
4541
4542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4544 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4546
4547 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4548 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4549
4550 IEM_MC_ADVANCE_RIP();
4551 IEM_MC_END();
4552 }
4553 }
4554 else
4555 {
4556 /**
4557 * @opdone
4558 * @opcode 0x7e
4559 * @opcodesub rex.w=0
4560 * @oppfx 0x66
4561 * @opcpuid sse2
4562 * @opgroup og_sse2_simdint_datamove
4563 * @opxcpttype 5
4564 * @opfunction iemOp_movd_q_Ey_Vy
4565 * @optest op1=1 op2=2 -> op1=2
4566 * @optest op1=0 op2=-42 -> op1=-42
4567 */
4568 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4569 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4570 {
4571 /* greg32, XMM */
4572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4573 IEM_MC_BEGIN(0, 1);
4574 IEM_MC_LOCAL(uint32_t, u32Tmp);
4575
4576 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4577 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4578
4579 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4580 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4581
4582 IEM_MC_ADVANCE_RIP();
4583 IEM_MC_END();
4584 }
4585 else
4586 {
4587 /* [mem32], XMM */
4588 IEM_MC_BEGIN(0, 2);
4589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4590 IEM_MC_LOCAL(uint32_t, u32Tmp);
4591
4592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4594 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4595 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4596
4597 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4598 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4599
4600 IEM_MC_ADVANCE_RIP();
4601 IEM_MC_END();
4602 }
4603 }
4604 return VINF_SUCCESS;
4606}
4607
4608/**
4609 * @opcode 0x7e
4610 * @oppfx 0xf3
4611 * @opcpuid sse2
4612 * @opgroup og_sse2_pcksclr_datamove
4613 * @opxcpttype none
4614 * @optest op1=1 op2=2 -> op1=2
4615 * @optest op1=0 op2=-42 -> op1=-42
4616 */
4617FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4618{
4619 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4622 {
4623 /*
4624 * Register, register.
4625 */
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627 IEM_MC_BEGIN(0, 2);
4628 IEM_MC_LOCAL(uint64_t, uSrc);
4629
4630 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4631 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4632
4633 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4634 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4635
4636 IEM_MC_ADVANCE_RIP();
4637 IEM_MC_END();
4638 }
4639 else
4640 {
4641 /*
4642 * Memory, register.
4643 */
4644 IEM_MC_BEGIN(0, 2);
4645 IEM_MC_LOCAL(uint64_t, uSrc);
4646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4647
4648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4651 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4652
4653 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4654 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4655
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 return VINF_SUCCESS;
4660}
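
/*
 * Behaviour sketch for the movq above: the low quadword is copied and the
 * destination's high quadword is cleared, which is what the
 * IEM_MC_STORE_XREG_U64_ZX_U128 microcode op expresses:
 *
 *      puDst->au64[0] = uSrc;
 *      puDst->au64[1] = 0;
 */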
4661
4662/* Opcode 0xf2 0x0f 0x7e - invalid */
4663
4664
4665/** Opcode 0x0f 0x7f - movq Qq, Pq */
4666FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4667{
4668 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4671 {
4672 /*
4673 * Register, register.
4674 */
4675 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4676 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4678 IEM_MC_BEGIN(0, 1);
4679 IEM_MC_LOCAL(uint64_t, u64Tmp);
4680 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4681 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4682 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4683 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4684 IEM_MC_ADVANCE_RIP();
4685 IEM_MC_END();
4686 }
4687 else
4688 {
4689 /*
4690 * Memory, register.
4691 */
4692 IEM_MC_BEGIN(0, 2);
4693 IEM_MC_LOCAL(uint64_t, u64Tmp);
4694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4695
4696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4699 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4700
4701 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4702 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4703
4704 IEM_MC_ADVANCE_RIP();
4705 IEM_MC_END();
4706 }
4707 return VINF_SUCCESS;
4708}
4709
4710/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4711FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4712{
4713 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4715 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4716 {
4717 /*
4718 * Register, register.
4719 */
4720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4721 IEM_MC_BEGIN(0, 0);
4722 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4723 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4724 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4725 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 /*
4732 * Memory, register.
4733 */
4734 IEM_MC_BEGIN(0, 2);
4735 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4737
4738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4740 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4741 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4742
4743 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4744 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4745
4746 IEM_MC_ADVANCE_RIP();
4747 IEM_MC_END();
4748 }
4749 return VINF_SUCCESS;
4750}
4751
4752/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4753FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4754{
4755 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4757 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4758 {
4759 /*
4760 * Register, register.
4761 */
4762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4763 IEM_MC_BEGIN(0, 0);
4764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4765 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4766 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4767 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4768 IEM_MC_ADVANCE_RIP();
4769 IEM_MC_END();
4770 }
4771 else
4772 {
4773 /*
4774 * Memory, register.
4775 */
4776 IEM_MC_BEGIN(0, 2);
4777 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4779
4780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4783 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4784
4785 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4786 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4787
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 }
4791 return VINF_SUCCESS;
4792}
4793
4794/* Opcode 0xf2 0x0f 0x7f - invalid */
4795
4796
4797
4798/** Opcode 0x0f 0x80. */
4799FNIEMOP_DEF(iemOp_jo_Jv)
4800{
4801 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4802 IEMOP_HLP_MIN_386();
4803 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4804 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4805 {
4806 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4808
4809 IEM_MC_BEGIN(0, 0);
4810 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4811 IEM_MC_REL_JMP_S16(i16Imm);
4812 } IEM_MC_ELSE() {
4813 IEM_MC_ADVANCE_RIP();
4814 } IEM_MC_ENDIF();
4815 IEM_MC_END();
4816 }
4817 else
4818 {
4819 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4821
4822 IEM_MC_BEGIN(0, 0);
4823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4824 IEM_MC_REL_JMP_S32(i32Imm);
4825 } IEM_MC_ELSE() {
4826 IEM_MC_ADVANCE_RIP();
4827 } IEM_MC_ENDIF();
4828 IEM_MC_END();
4829 }
4830 return VINF_SUCCESS;
4831}
4832
4833
4834/** Opcode 0x0f 0x81. */
4835FNIEMOP_DEF(iemOp_jno_Jv)
4836{
4837 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4838 IEMOP_HLP_MIN_386();
4839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4840 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4841 {
4842 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4844
4845 IEM_MC_BEGIN(0, 0);
4846 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4847 IEM_MC_ADVANCE_RIP();
4848 } IEM_MC_ELSE() {
4849 IEM_MC_REL_JMP_S16(i16Imm);
4850 } IEM_MC_ENDIF();
4851 IEM_MC_END();
4852 }
4853 else
4854 {
4855 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4857
4858 IEM_MC_BEGIN(0, 0);
4859 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4860 IEM_MC_ADVANCE_RIP();
4861 } IEM_MC_ELSE() {
4862 IEM_MC_REL_JMP_S32(i32Imm);
4863 } IEM_MC_ENDIF();
4864 IEM_MC_END();
4865 }
4866 return VINF_SUCCESS;
4867}
4868
4869
4870/** Opcode 0x0f 0x82. */
4871FNIEMOP_DEF(iemOp_jc_Jv)
4872{
4873 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4874 IEMOP_HLP_MIN_386();
4875 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4876 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4877 {
4878 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4880
4881 IEM_MC_BEGIN(0, 0);
4882 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4883 IEM_MC_REL_JMP_S16(i16Imm);
4884 } IEM_MC_ELSE() {
4885 IEM_MC_ADVANCE_RIP();
4886 } IEM_MC_ENDIF();
4887 IEM_MC_END();
4888 }
4889 else
4890 {
4891 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4893
4894 IEM_MC_BEGIN(0, 0);
4895 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4896 IEM_MC_REL_JMP_S32(i32Imm);
4897 } IEM_MC_ELSE() {
4898 IEM_MC_ADVANCE_RIP();
4899 } IEM_MC_ENDIF();
4900 IEM_MC_END();
4901 }
4902 return VINF_SUCCESS;
4903}
4904
4905
4906/** Opcode 0x0f 0x83. */
4907FNIEMOP_DEF(iemOp_jnc_Jv)
4908{
4909 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4910 IEMOP_HLP_MIN_386();
4911 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4912 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4913 {
4914 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4916
4917 IEM_MC_BEGIN(0, 0);
4918 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4919 IEM_MC_ADVANCE_RIP();
4920 } IEM_MC_ELSE() {
4921 IEM_MC_REL_JMP_S16(i16Imm);
4922 } IEM_MC_ENDIF();
4923 IEM_MC_END();
4924 }
4925 else
4926 {
4927 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929
4930 IEM_MC_BEGIN(0, 0);
4931 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4932 IEM_MC_ADVANCE_RIP();
4933 } IEM_MC_ELSE() {
4934 IEM_MC_REL_JMP_S32(i32Imm);
4935 } IEM_MC_ENDIF();
4936 IEM_MC_END();
4937 }
4938 return VINF_SUCCESS;
4939}
4940
4941
4942/** Opcode 0x0f 0x84. */
4943FNIEMOP_DEF(iemOp_je_Jv)
4944{
4945 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4946 IEMOP_HLP_MIN_386();
4947 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4948 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4949 {
4950 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4952
4953 IEM_MC_BEGIN(0, 0);
4954 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4955 IEM_MC_REL_JMP_S16(i16Imm);
4956 } IEM_MC_ELSE() {
4957 IEM_MC_ADVANCE_RIP();
4958 } IEM_MC_ENDIF();
4959 IEM_MC_END();
4960 }
4961 else
4962 {
4963 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4965
4966 IEM_MC_BEGIN(0, 0);
4967 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4968 IEM_MC_REL_JMP_S32(i32Imm);
4969 } IEM_MC_ELSE() {
4970 IEM_MC_ADVANCE_RIP();
4971 } IEM_MC_ENDIF();
4972 IEM_MC_END();
4973 }
4974 return VINF_SUCCESS;
4975}
4976
4977
4978/** Opcode 0x0f 0x85. */
4979FNIEMOP_DEF(iemOp_jne_Jv)
4980{
4981 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4982 IEMOP_HLP_MIN_386();
4983 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4984 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4985 {
4986 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4988
4989 IEM_MC_BEGIN(0, 0);
4990 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4991 IEM_MC_ADVANCE_RIP();
4992 } IEM_MC_ELSE() {
4993 IEM_MC_REL_JMP_S16(i16Imm);
4994 } IEM_MC_ENDIF();
4995 IEM_MC_END();
4996 }
4997 else
4998 {
4999 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5001
5002 IEM_MC_BEGIN(0, 0);
5003 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5004 IEM_MC_ADVANCE_RIP();
5005 } IEM_MC_ELSE() {
5006 IEM_MC_REL_JMP_S32(i32Imm);
5007 } IEM_MC_ENDIF();
5008 IEM_MC_END();
5009 }
5010 return VINF_SUCCESS;
5011}
5012
5013
5014/** Opcode 0x0f 0x86. */
5015FNIEMOP_DEF(iemOp_jbe_Jv)
5016{
5017 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5018 IEMOP_HLP_MIN_386();
5019 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5020 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5021 {
5022 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5024
5025 IEM_MC_BEGIN(0, 0);
5026 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5027 IEM_MC_REL_JMP_S16(i16Imm);
5028 } IEM_MC_ELSE() {
5029 IEM_MC_ADVANCE_RIP();
5030 } IEM_MC_ENDIF();
5031 IEM_MC_END();
5032 }
5033 else
5034 {
5035 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5037
5038 IEM_MC_BEGIN(0, 0);
5039 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5040 IEM_MC_REL_JMP_S32(i32Imm);
5041 } IEM_MC_ELSE() {
5042 IEM_MC_ADVANCE_RIP();
5043 } IEM_MC_ENDIF();
5044 IEM_MC_END();
5045 }
5046 return VINF_SUCCESS;
5047}
5048
5049
5050/** Opcode 0x0f 0x87. */
5051FNIEMOP_DEF(iemOp_jnbe_Jv)
5052{
5053 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5054 IEMOP_HLP_MIN_386();
5055 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5056 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5057 {
5058 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5060
5061 IEM_MC_BEGIN(0, 0);
5062 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5063 IEM_MC_ADVANCE_RIP();
5064 } IEM_MC_ELSE() {
5065 IEM_MC_REL_JMP_S16(i16Imm);
5066 } IEM_MC_ENDIF();
5067 IEM_MC_END();
5068 }
5069 else
5070 {
5071 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5073
5074 IEM_MC_BEGIN(0, 0);
5075 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5076 IEM_MC_ADVANCE_RIP();
5077 } IEM_MC_ELSE() {
5078 IEM_MC_REL_JMP_S32(i32Imm);
5079 } IEM_MC_ENDIF();
5080 IEM_MC_END();
5081 }
5082 return VINF_SUCCESS;
5083}
5084
5085
5086/** Opcode 0x0f 0x88. */
5087FNIEMOP_DEF(iemOp_js_Jv)
5088{
5089 IEMOP_MNEMONIC(js_Jv, "js Jv");
5090 IEMOP_HLP_MIN_386();
5091 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5092 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5093 {
5094 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5096
5097 IEM_MC_BEGIN(0, 0);
5098 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5099 IEM_MC_REL_JMP_S16(i16Imm);
5100 } IEM_MC_ELSE() {
5101 IEM_MC_ADVANCE_RIP();
5102 } IEM_MC_ENDIF();
5103 IEM_MC_END();
5104 }
5105 else
5106 {
5107 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5109
5110 IEM_MC_BEGIN(0, 0);
5111 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5112 IEM_MC_REL_JMP_S32(i32Imm);
5113 } IEM_MC_ELSE() {
5114 IEM_MC_ADVANCE_RIP();
5115 } IEM_MC_ENDIF();
5116 IEM_MC_END();
5117 }
5118 return VINF_SUCCESS;
5119}
5120
5121
5122/** Opcode 0x0f 0x89. */
5123FNIEMOP_DEF(iemOp_jns_Jv)
5124{
5125 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5126 IEMOP_HLP_MIN_386();
5127 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5128 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5129 {
5130 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5132
5133 IEM_MC_BEGIN(0, 0);
5134 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5135 IEM_MC_ADVANCE_RIP();
5136 } IEM_MC_ELSE() {
5137 IEM_MC_REL_JMP_S16(i16Imm);
5138 } IEM_MC_ENDIF();
5139 IEM_MC_END();
5140 }
5141 else
5142 {
5143 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5145
5146 IEM_MC_BEGIN(0, 0);
5147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5148 IEM_MC_ADVANCE_RIP();
5149 } IEM_MC_ELSE() {
5150 IEM_MC_REL_JMP_S32(i32Imm);
5151 } IEM_MC_ENDIF();
5152 IEM_MC_END();
5153 }
5154 return VINF_SUCCESS;
5155}
5156
5157
5158/** Opcode 0x0f 0x8a. */
5159FNIEMOP_DEF(iemOp_jp_Jv)
5160{
5161 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5162 IEMOP_HLP_MIN_386();
5163 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5164 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5165 {
5166 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5168
5169 IEM_MC_BEGIN(0, 0);
5170 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5171 IEM_MC_REL_JMP_S16(i16Imm);
5172 } IEM_MC_ELSE() {
5173 IEM_MC_ADVANCE_RIP();
5174 } IEM_MC_ENDIF();
5175 IEM_MC_END();
5176 }
5177 else
5178 {
5179 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5181
5182 IEM_MC_BEGIN(0, 0);
5183 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5184 IEM_MC_REL_JMP_S32(i32Imm);
5185 } IEM_MC_ELSE() {
5186 IEM_MC_ADVANCE_RIP();
5187 } IEM_MC_ENDIF();
5188 IEM_MC_END();
5189 }
5190 return VINF_SUCCESS;
5191}
5192
5193
5194/** Opcode 0x0f 0x8b. */
5195FNIEMOP_DEF(iemOp_jnp_Jv)
5196{
5197 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5198 IEMOP_HLP_MIN_386();
5199 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5200 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5201 {
5202 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5204
5205 IEM_MC_BEGIN(0, 0);
5206 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5207 IEM_MC_ADVANCE_RIP();
5208 } IEM_MC_ELSE() {
5209 IEM_MC_REL_JMP_S16(i16Imm);
5210 } IEM_MC_ENDIF();
5211 IEM_MC_END();
5212 }
5213 else
5214 {
5215 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5217
5218 IEM_MC_BEGIN(0, 0);
5219 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5220 IEM_MC_ADVANCE_RIP();
5221 } IEM_MC_ELSE() {
5222 IEM_MC_REL_JMP_S32(i32Imm);
5223 } IEM_MC_ENDIF();
5224 IEM_MC_END();
5225 }
5226 return VINF_SUCCESS;
5227}
5228
5229
5230/** Opcode 0x0f 0x8c. */
5231FNIEMOP_DEF(iemOp_jl_Jv)
5232{
5233 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5234 IEMOP_HLP_MIN_386();
5235 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5236 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5237 {
5238 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5240
5241 IEM_MC_BEGIN(0, 0);
5242 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5243 IEM_MC_REL_JMP_S16(i16Imm);
5244 } IEM_MC_ELSE() {
5245 IEM_MC_ADVANCE_RIP();
5246 } IEM_MC_ENDIF();
5247 IEM_MC_END();
5248 }
5249 else
5250 {
5251 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5253
5254 IEM_MC_BEGIN(0, 0);
5255 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5256 IEM_MC_REL_JMP_S32(i32Imm);
5257 } IEM_MC_ELSE() {
5258 IEM_MC_ADVANCE_RIP();
5259 } IEM_MC_ENDIF();
5260 IEM_MC_END();
5261 }
5262 return VINF_SUCCESS;
5263}
5264
5265
5266/** Opcode 0x0f 0x8d. */
5267FNIEMOP_DEF(iemOp_jnl_Jv)
5268{
5269 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5270 IEMOP_HLP_MIN_386();
5271 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5272 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5273 {
5274 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5276
5277 IEM_MC_BEGIN(0, 0);
5278 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5279 IEM_MC_ADVANCE_RIP();
5280 } IEM_MC_ELSE() {
5281 IEM_MC_REL_JMP_S16(i16Imm);
5282 } IEM_MC_ENDIF();
5283 IEM_MC_END();
5284 }
5285 else
5286 {
5287 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5289
5290 IEM_MC_BEGIN(0, 0);
5291 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5292 IEM_MC_ADVANCE_RIP();
5293 } IEM_MC_ELSE() {
5294 IEM_MC_REL_JMP_S32(i32Imm);
5295 } IEM_MC_ENDIF();
5296 IEM_MC_END();
5297 }
5298 return VINF_SUCCESS;
5299}
5300
5301
5302/** Opcode 0x0f 0x8e. */
5303FNIEMOP_DEF(iemOp_jle_Jv)
5304{
5305 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5306 IEMOP_HLP_MIN_386();
5307 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5308 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5309 {
5310 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5312
5313 IEM_MC_BEGIN(0, 0);
5314 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5315 IEM_MC_REL_JMP_S16(i16Imm);
5316 } IEM_MC_ELSE() {
5317 IEM_MC_ADVANCE_RIP();
5318 } IEM_MC_ENDIF();
5319 IEM_MC_END();
5320 }
5321 else
5322 {
5323 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5325
5326 IEM_MC_BEGIN(0, 0);
5327 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5328 IEM_MC_REL_JMP_S32(i32Imm);
5329 } IEM_MC_ELSE() {
5330 IEM_MC_ADVANCE_RIP();
5331 } IEM_MC_ENDIF();
5332 IEM_MC_END();
5333 }
5334 return VINF_SUCCESS;
5335}
5336
5337
5338/** Opcode 0x0f 0x8f. */
5339FNIEMOP_DEF(iemOp_jnle_Jv)
5340{
5341 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5342 IEMOP_HLP_MIN_386();
5343 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5344 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5345 {
5346 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5348
5349 IEM_MC_BEGIN(0, 0);
5350 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5351 IEM_MC_ADVANCE_RIP();
5352 } IEM_MC_ELSE() {
5353 IEM_MC_REL_JMP_S16(i16Imm);
5354 } IEM_MC_ENDIF();
5355 IEM_MC_END();
5356 }
5357 else
5358 {
5359 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5361
5362 IEM_MC_BEGIN(0, 0);
5363 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5364 IEM_MC_ADVANCE_RIP();
5365 } IEM_MC_ELSE() {
5366 IEM_MC_REL_JMP_S32(i32Imm);
5367 } IEM_MC_ENDIF();
5368 IEM_MC_END();
5369 }
5370 return VINF_SUCCESS;
5371}
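
/*
 * Summary of the Jcc conditions implemented above (0F 80..0F 8F); cc is the
 * low nibble of the second opcode byte:
 *
 *      0  jo       OF=1             8  js       SF=1
 *      1  jno      OF=0             9  jns      SF=0
 *      2  jc/jb    CF=1             a  jp       PF=1
 *      3  jnc/jae  CF=0             b  jnp      PF=0
 *      4  je/jz    ZF=1             c  jl       SF!=OF
 *      5  jne/jnz  ZF=0             d  jnl/jge  SF=OF
 *      6  jbe      CF=1 or ZF=1     e  jle      ZF=1 or SF!=OF
 *      7  jnbe/ja  CF=0 and ZF=0    f  jnle/jg  ZF=0 and SF=OF
 *
 * The setcc instructions at 0F 90..0F 9F below test the same conditions.
 */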
5372
5373
5374/** Opcode 0x0f 0x90. */
5375FNIEMOP_DEF(iemOp_seto_Eb)
5376{
5377 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5378 IEMOP_HLP_MIN_386();
5379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5380
5381 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5382 * any way. AMD says it's "unused", whatever that means. We're
5383 * ignoring it for now. */
5384 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5385 {
5386 /* register target */
5387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5388 IEM_MC_BEGIN(0, 0);
5389 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5390 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5391 } IEM_MC_ELSE() {
5392 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5393 } IEM_MC_ENDIF();
5394 IEM_MC_ADVANCE_RIP();
5395 IEM_MC_END();
5396 }
5397 else
5398 {
5399 /* memory target */
5400 IEM_MC_BEGIN(0, 1);
5401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5404 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5405 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5406 } IEM_MC_ELSE() {
5407 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5408 } IEM_MC_ENDIF();
5409 IEM_MC_ADVANCE_RIP();
5410 IEM_MC_END();
5411 }
5412 return VINF_SUCCESS;
5413}
5414
5415
5416/** Opcode 0x0f 0x91. */
5417FNIEMOP_DEF(iemOp_setno_Eb)
5418{
5419 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5420 IEMOP_HLP_MIN_386();
5421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5422
5423 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5424 * any way. AMD says it's "unused", whatever that means. We're
5425 * ignoring it for now. */
5426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5427 {
5428 /* register target */
5429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5430 IEM_MC_BEGIN(0, 0);
5431 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5432 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5433 } IEM_MC_ELSE() {
5434 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5435 } IEM_MC_ENDIF();
5436 IEM_MC_ADVANCE_RIP();
5437 IEM_MC_END();
5438 }
5439 else
5440 {
5441 /* memory target */
5442 IEM_MC_BEGIN(0, 1);
5443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5446 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5447 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5448 } IEM_MC_ELSE() {
5449 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5450 } IEM_MC_ENDIF();
5451 IEM_MC_ADVANCE_RIP();
5452 IEM_MC_END();
5453 }
5454 return VINF_SUCCESS;
5455}
5456
5457
5458/** Opcode 0x0f 0x92. */
5459FNIEMOP_DEF(iemOp_setc_Eb)
5460{
5461 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5462 IEMOP_HLP_MIN_386();
5463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5464
5465 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5466 * any way. AMD says it's "unused", whatever that means. We're
5467 * ignoring it for now. */
5468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5469 {
5470 /* register target */
5471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5472 IEM_MC_BEGIN(0, 0);
5473 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5474 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5475 } IEM_MC_ELSE() {
5476 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5477 } IEM_MC_ENDIF();
5478 IEM_MC_ADVANCE_RIP();
5479 IEM_MC_END();
5480 }
5481 else
5482 {
5483 /* memory target */
5484 IEM_MC_BEGIN(0, 1);
5485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5488 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5489 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5490 } IEM_MC_ELSE() {
5491 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5492 } IEM_MC_ENDIF();
5493 IEM_MC_ADVANCE_RIP();
5494 IEM_MC_END();
5495 }
5496 return VINF_SUCCESS;
5497}
5498
5499
5500/** Opcode 0x0f 0x93. */
5501FNIEMOP_DEF(iemOp_setnc_Eb)
5502{
5503 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5504 IEMOP_HLP_MIN_386();
5505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5506
5507 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5508 * any way. AMD says it's "unused", whatever that means. We're
5509 * ignoring it for now. */
5510 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5511 {
5512 /* register target */
5513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5514 IEM_MC_BEGIN(0, 0);
5515 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5516 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5517 } IEM_MC_ELSE() {
5518 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5519 } IEM_MC_ENDIF();
5520 IEM_MC_ADVANCE_RIP();
5521 IEM_MC_END();
5522 }
5523 else
5524 {
5525 /* memory target */
5526 IEM_MC_BEGIN(0, 1);
5527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5531 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5532 } IEM_MC_ELSE() {
5533 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5534 } IEM_MC_ENDIF();
5535 IEM_MC_ADVANCE_RIP();
5536 IEM_MC_END();
5537 }
5538 return VINF_SUCCESS;
5539}
5540
5541
5542/** Opcode 0x0f 0x94. */
5543FNIEMOP_DEF(iemOp_sete_Eb)
5544{
5545 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5546 IEMOP_HLP_MIN_386();
5547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5548
5549 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5550 * any way. AMD says it's "unused", whatever that means. We're
5551 * ignoring it for now. */
5552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5553 {
5554 /* register target */
5555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5556 IEM_MC_BEGIN(0, 0);
5557 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5558 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5559 } IEM_MC_ELSE() {
5560 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5561 } IEM_MC_ENDIF();
5562 IEM_MC_ADVANCE_RIP();
5563 IEM_MC_END();
5564 }
5565 else
5566 {
5567 /* memory target */
5568 IEM_MC_BEGIN(0, 1);
5569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5572 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5573 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5574 } IEM_MC_ELSE() {
5575 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5576 } IEM_MC_ENDIF();
5577 IEM_MC_ADVANCE_RIP();
5578 IEM_MC_END();
5579 }
5580 return VINF_SUCCESS;
5581}
5582
5583
5584/** Opcode 0x0f 0x95. */
5585FNIEMOP_DEF(iemOp_setne_Eb)
5586{
5587 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5588 IEMOP_HLP_MIN_386();
5589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5590
5591 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5592 * any way. AMD says it's "unused", whatever that means. We're
5593 * ignoring it for now. */
5594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5595 {
5596 /* register target */
5597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5598 IEM_MC_BEGIN(0, 0);
5599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5600 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5601 } IEM_MC_ELSE() {
5602 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5603 } IEM_MC_ENDIF();
5604 IEM_MC_ADVANCE_RIP();
5605 IEM_MC_END();
5606 }
5607 else
5608 {
5609 /* memory target */
5610 IEM_MC_BEGIN(0, 1);
5611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5616 } IEM_MC_ELSE() {
5617 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5618 } IEM_MC_ENDIF();
5619 IEM_MC_ADVANCE_RIP();
5620 IEM_MC_END();
5621 }
5622 return VINF_SUCCESS;
5623}
5624
5625
5626/** Opcode 0x0f 0x96. */
5627FNIEMOP_DEF(iemOp_setbe_Eb)
5628{
5629 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5630 IEMOP_HLP_MIN_386();
5631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5632
5633 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5634 * any way. AMD says it's "unused", whatever that means. We're
5635 * ignoring it for now. */
5636 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5637 {
5638 /* register target */
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEM_MC_BEGIN(0, 0);
5641 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5642 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5643 } IEM_MC_ELSE() {
5644 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5645 } IEM_MC_ENDIF();
5646 IEM_MC_ADVANCE_RIP();
5647 IEM_MC_END();
5648 }
5649 else
5650 {
5651 /* memory target */
5652 IEM_MC_BEGIN(0, 1);
5653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5656 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5658 } IEM_MC_ELSE() {
5659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5660 } IEM_MC_ENDIF();
5661 IEM_MC_ADVANCE_RIP();
5662 IEM_MC_END();
5663 }
5664 return VINF_SUCCESS;
5665}
5666
5667
5668/** Opcode 0x0f 0x97. */
5669FNIEMOP_DEF(iemOp_setnbe_Eb)
5670{
5671 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5672 IEMOP_HLP_MIN_386();
5673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5674
5675 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5676 * any way. AMD says it's "unused", whatever that means. We're
5677 * ignoring it for now. */
5678 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5679 {
5680 /* register target */
5681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5682 IEM_MC_BEGIN(0, 0);
5683 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5684 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5685 } IEM_MC_ELSE() {
5686 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5687 } IEM_MC_ENDIF();
5688 IEM_MC_ADVANCE_RIP();
5689 IEM_MC_END();
5690 }
5691 else
5692 {
5693 /* memory target */
5694 IEM_MC_BEGIN(0, 1);
5695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5698 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5700 } IEM_MC_ELSE() {
5701 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5702 } IEM_MC_ENDIF();
5703 IEM_MC_ADVANCE_RIP();
5704 IEM_MC_END();
5705 }
5706 return VINF_SUCCESS;
5707}
5708
5709
5710/** Opcode 0x0f 0x98. */
5711FNIEMOP_DEF(iemOp_sets_Eb)
5712{
5713 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5714 IEMOP_HLP_MIN_386();
5715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5716
5717 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5718 * any way. AMD says it's "unused", whatever that means. We're
5719 * ignoring it for now. */
5720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5721 {
5722 /* register target */
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5724 IEM_MC_BEGIN(0, 0);
5725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5726 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5727 } IEM_MC_ELSE() {
5728 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5729 } IEM_MC_ENDIF();
5730 IEM_MC_ADVANCE_RIP();
5731 IEM_MC_END();
5732 }
5733 else
5734 {
5735 /* memory target */
5736 IEM_MC_BEGIN(0, 1);
5737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5742 } IEM_MC_ELSE() {
5743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5744 } IEM_MC_ENDIF();
5745 IEM_MC_ADVANCE_RIP();
5746 IEM_MC_END();
5747 }
5748 return VINF_SUCCESS;
5749}
5750
5751
5752/** Opcode 0x0f 0x99. */
5753FNIEMOP_DEF(iemOp_setns_Eb)
5754{
5755 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5756 IEMOP_HLP_MIN_386();
5757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5758
5759 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5760 * any way. AMD says it's "unused", whatever that means. We're
5761 * ignoring it for now. */
5762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5763 {
5764 /* register target */
5765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5766 IEM_MC_BEGIN(0, 0);
5767 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5768 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5769 } IEM_MC_ELSE() {
5770 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5771 } IEM_MC_ENDIF();
5772 IEM_MC_ADVANCE_RIP();
5773 IEM_MC_END();
5774 }
5775 else
5776 {
5777 /* memory target */
5778 IEM_MC_BEGIN(0, 1);
5779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5782 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5783 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5784 } IEM_MC_ELSE() {
5785 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5786 } IEM_MC_ENDIF();
5787 IEM_MC_ADVANCE_RIP();
5788 IEM_MC_END();
5789 }
5790 return VINF_SUCCESS;
5791}
5792
5793
5794/** Opcode 0x0f 0x9a. */
5795FNIEMOP_DEF(iemOp_setp_Eb)
5796{
5797 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5798 IEMOP_HLP_MIN_386();
5799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5800
5801 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5802 * any way. AMD says it's "unused", whatever that means. We're
5803 * ignoring it for now. */
5804 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5805 {
5806 /* register target */
5807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5808 IEM_MC_BEGIN(0, 0);
5809 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5810 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5811 } IEM_MC_ELSE() {
5812 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5813 } IEM_MC_ENDIF();
5814 IEM_MC_ADVANCE_RIP();
5815 IEM_MC_END();
5816 }
5817 else
5818 {
5819 /* memory target */
5820 IEM_MC_BEGIN(0, 1);
5821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5824 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5825 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5826 } IEM_MC_ELSE() {
5827 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5828 } IEM_MC_ENDIF();
5829 IEM_MC_ADVANCE_RIP();
5830 IEM_MC_END();
5831 }
5832 return VINF_SUCCESS;
5833}
5834
5835
5836/** Opcode 0x0f 0x9b. */
5837FNIEMOP_DEF(iemOp_setnp_Eb)
5838{
5839 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5840 IEMOP_HLP_MIN_386();
5841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5842
5843 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5844 * any way. AMD says it's "unused", whatever that means. We're
5845 * ignoring it for now. */
5846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5847 {
5848 /* register target */
5849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5850 IEM_MC_BEGIN(0, 0);
5851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5852 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5853 } IEM_MC_ELSE() {
5854 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5855 } IEM_MC_ENDIF();
5856 IEM_MC_ADVANCE_RIP();
5857 IEM_MC_END();
5858 }
5859 else
5860 {
5861 /* memory target */
5862 IEM_MC_BEGIN(0, 1);
5863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5868 } IEM_MC_ELSE() {
5869 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5870 } IEM_MC_ENDIF();
5871 IEM_MC_ADVANCE_RIP();
5872 IEM_MC_END();
5873 }
5874 return VINF_SUCCESS;
5875}
5876
5877
5878/** Opcode 0x0f 0x9c. */
5879FNIEMOP_DEF(iemOp_setl_Eb)
5880{
5881 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5882 IEMOP_HLP_MIN_386();
5883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5884
5885 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5886 * any way. AMD says it's "unused", whatever that means. We're
5887 * ignoring it for now. */
5888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5889 {
5890 /* register target */
5891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5892 IEM_MC_BEGIN(0, 0);
5893 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5894 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5895 } IEM_MC_ELSE() {
5896 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5897 } IEM_MC_ENDIF();
5898 IEM_MC_ADVANCE_RIP();
5899 IEM_MC_END();
5900 }
5901 else
5902 {
5903 /* memory target */
5904 IEM_MC_BEGIN(0, 1);
5905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5908 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5910 } IEM_MC_ELSE() {
5911 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5912 } IEM_MC_ENDIF();
5913 IEM_MC_ADVANCE_RIP();
5914 IEM_MC_END();
5915 }
5916 return VINF_SUCCESS;
5917}
5918
5919
5920/** Opcode 0x0f 0x9d. */
5921FNIEMOP_DEF(iemOp_setnl_Eb)
5922{
5923 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5924 IEMOP_HLP_MIN_386();
5925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5926
5927 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5928 * any way. AMD says it's "unused", whatever that means. We're
5929 * ignoring it for now. */
5930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5931 {
5932 /* register target */
5933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5934 IEM_MC_BEGIN(0, 0);
5935 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5936 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5937 } IEM_MC_ELSE() {
5938 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5939 } IEM_MC_ENDIF();
5940 IEM_MC_ADVANCE_RIP();
5941 IEM_MC_END();
5942 }
5943 else
5944 {
5945 /* memory target */
5946 IEM_MC_BEGIN(0, 1);
5947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5950 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5952 } IEM_MC_ELSE() {
5953 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5954 } IEM_MC_ENDIF();
5955 IEM_MC_ADVANCE_RIP();
5956 IEM_MC_END();
5957 }
5958 return VINF_SUCCESS;
5959}
5960
5961
5962/** Opcode 0x0f 0x9e. */
5963FNIEMOP_DEF(iemOp_setle_Eb)
5964{
5965 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5966 IEMOP_HLP_MIN_386();
5967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5968
5969 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5970 * any way. AMD says it's "unused", whatever that means. We're
5971 * ignoring it for now. */
5972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5973 {
5974 /* register target */
5975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5976 IEM_MC_BEGIN(0, 0);
5977 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5978 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5979 } IEM_MC_ELSE() {
5980 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5981 } IEM_MC_ENDIF();
5982 IEM_MC_ADVANCE_RIP();
5983 IEM_MC_END();
5984 }
5985 else
5986 {
5987 /* memory target */
5988 IEM_MC_BEGIN(0, 1);
5989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5992 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5993 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5994 } IEM_MC_ELSE() {
5995 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5996 } IEM_MC_ENDIF();
5997 IEM_MC_ADVANCE_RIP();
5998 IEM_MC_END();
5999 }
6000 return VINF_SUCCESS;
6001}
6002
6003
6004/** Opcode 0x0f 0x9f. */
6005FNIEMOP_DEF(iemOp_setnle_Eb)
6006{
6007 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6008 IEMOP_HLP_MIN_386();
6009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6010
6011 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6012 * any way. AMD says it's "unused", whatever that means. We're
6013 * ignoring it for now. */
6014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6015 {
6016 /* register target */
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 IEM_MC_BEGIN(0, 0);
6019 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6020 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6021 } IEM_MC_ELSE() {
6022 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6023 } IEM_MC_ENDIF();
6024 IEM_MC_ADVANCE_RIP();
6025 IEM_MC_END();
6026 }
6027 else
6028 {
6029 /* memory target */
6030 IEM_MC_BEGIN(0, 1);
6031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6034 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6035 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6036 } IEM_MC_ELSE() {
6037 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6038 } IEM_MC_ENDIF();
6039 IEM_MC_ADVANCE_RIP();
6040 IEM_MC_END();
6041 }
6042 return VINF_SUCCESS;
6043}
6044
6045
6046/**
6047 * Common 'push segment-register' helper.
6048 */
6049FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6050{
6051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6052 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS are pushable in 64-bit mode. */
6053 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6054
6055 switch (pVCpu->iem.s.enmEffOpSize)
6056 {
6057 case IEMMODE_16BIT:
6058 IEM_MC_BEGIN(0, 1);
6059 IEM_MC_LOCAL(uint16_t, u16Value);
6060 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6061 IEM_MC_PUSH_U16(u16Value);
6062 IEM_MC_ADVANCE_RIP();
6063 IEM_MC_END();
6064 break;
6065
6066 case IEMMODE_32BIT:
6067 IEM_MC_BEGIN(0, 1);
6068 IEM_MC_LOCAL(uint32_t, u32Value);
6069 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6070 IEM_MC_PUSH_U32_SREG(u32Value);
6071 IEM_MC_ADVANCE_RIP();
6072 IEM_MC_END();
6073 break;
6074
6075 case IEMMODE_64BIT:
6076 IEM_MC_BEGIN(0, 1);
6077 IEM_MC_LOCAL(uint64_t, u64Value);
6078 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6079 IEM_MC_PUSH_U64(u64Value);
6080 IEM_MC_ADVANCE_RIP();
6081 IEM_MC_END();
6082 break;
6083 }
6084
6085 return VINF_SUCCESS;
6086}
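
/**
 * @remarks IEM_MC_PUSH_U32_SREG exists because a 32-bit push of a segment
 *          register is special-cased in hardware: at least some CPUs write
 *          only the low 16 bits of the stack slot and leave the high half
 *          untouched.  Illustrative sketch of that behaviour (hypothetical
 *          model, not IEM code):
 * @code
 *      static void pushSRegU32Model(uint8_t *pbStack, uint32_t *puEsp, uint16_t uSel)
 *      {
 *          *puEsp -= 4;                                  // reserve the full dword slot
 *          pbStack[*puEsp]     = (uint8_t)uSel;          // ... but write only the
 *          pbStack[*puEsp + 1] = (uint8_t)(uSel >> 8);   //     16-bit selector
 *      }
 * @endcode
 */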
6087
6088
6089/** Opcode 0x0f 0xa0. */
6090FNIEMOP_DEF(iemOp_push_fs)
6091{
6092 IEMOP_MNEMONIC(push_fs, "push fs");
6093 IEMOP_HLP_MIN_386();
6094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6095 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6096}
6097
6098
6099/** Opcode 0x0f 0xa1. */
6100FNIEMOP_DEF(iemOp_pop_fs)
6101{
6102 IEMOP_MNEMONIC(pop_fs, "pop fs");
6103 IEMOP_HLP_MIN_386();
6104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6105 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6106}
6107
6108
6109/** Opcode 0x0f 0xa2. */
6110FNIEMOP_DEF(iemOp_cpuid)
6111{
6112 IEMOP_MNEMONIC(cpuid, "cpuid");
6113 IEMOP_HLP_MIN_486(); /* not all 486es. */
6114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6115 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6116}
6117
6118
6119/**
6120 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6121 * iemOp_bts_Ev_Gv.
6122 */
6123FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6124{
6125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6126 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6127
6128 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6129 {
6130 /* register destination. */
6131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6132 switch (pVCpu->iem.s.enmEffOpSize)
6133 {
6134 case IEMMODE_16BIT:
6135 IEM_MC_BEGIN(3, 0);
6136 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6137 IEM_MC_ARG(uint16_t, u16Src, 1);
6138 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6139
6140 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6141 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6142 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6143 IEM_MC_REF_EFLAGS(pEFlags);
6144 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6145
6146 IEM_MC_ADVANCE_RIP();
6147 IEM_MC_END();
6148 return VINF_SUCCESS;
6149
6150 case IEMMODE_32BIT:
6151 IEM_MC_BEGIN(3, 0);
6152 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6153 IEM_MC_ARG(uint32_t, u32Src, 1);
6154 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6155
6156 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6157 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6158 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6159 IEM_MC_REF_EFLAGS(pEFlags);
6160 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6161
6162 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6163 IEM_MC_ADVANCE_RIP();
6164 IEM_MC_END();
6165 return VINF_SUCCESS;
6166
6167 case IEMMODE_64BIT:
6168 IEM_MC_BEGIN(3, 0);
6169 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6170 IEM_MC_ARG(uint64_t, u64Src, 1);
6171 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6172
6173 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6174 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6175 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6176 IEM_MC_REF_EFLAGS(pEFlags);
6177 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6178
6179 IEM_MC_ADVANCE_RIP();
6180 IEM_MC_END();
6181 return VINF_SUCCESS;
6182
6183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6184 }
6185 }
6186 else
6187 {
6188 /* memory destination. */
6189
6190 uint32_t fAccess;
6191 if (pImpl->pfnLockedU16)
6192 fAccess = IEM_ACCESS_DATA_RW;
6193 else /* BT */
6194 fAccess = IEM_ACCESS_DATA_R;
6195
6196 /** @todo test negative bit offsets! */
6197 switch (pVCpu->iem.s.enmEffOpSize)
6198 {
6199 case IEMMODE_16BIT:
6200 IEM_MC_BEGIN(3, 2);
6201 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6202 IEM_MC_ARG(uint16_t, u16Src, 1);
6203 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6205 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6206
6207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6208 if (pImpl->pfnLockedU16)
6209 IEMOP_HLP_DONE_DECODING();
6210 else
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6213 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6214 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6215 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6216 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6217 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6218 IEM_MC_FETCH_EFLAGS(EFlags);
6219
6220 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6221 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6222 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6223 else
6224 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6225 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6226
6227 IEM_MC_COMMIT_EFLAGS(EFlags);
6228 IEM_MC_ADVANCE_RIP();
6229 IEM_MC_END();
6230 return VINF_SUCCESS;
6231
6232 case IEMMODE_32BIT:
6233 IEM_MC_BEGIN(3, 2);
6234 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6235 IEM_MC_ARG(uint32_t, u32Src, 1);
6236 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6238 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6239
6240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6241 if (pImpl->pfnLockedU16)
6242 IEMOP_HLP_DONE_DECODING();
6243 else
6244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6245 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6246 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6247 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6248 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6249 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6250 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6251 IEM_MC_FETCH_EFLAGS(EFlags);
6252
6253 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6254 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6255 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6256 else
6257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6258 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6259
6260 IEM_MC_COMMIT_EFLAGS(EFlags);
6261 IEM_MC_ADVANCE_RIP();
6262 IEM_MC_END();
6263 return VINF_SUCCESS;
6264
6265 case IEMMODE_64BIT:
6266 IEM_MC_BEGIN(3, 2);
6267 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6268 IEM_MC_ARG(uint64_t, u64Src, 1);
6269 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6271 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6272
6273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6274 if (pImpl->pfnLockedU16)
6275 IEMOP_HLP_DONE_DECODING();
6276 else
6277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6278 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6279 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6280 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6281 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6282 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6283 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6284 IEM_MC_FETCH_EFLAGS(EFlags);
6285
6286 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6287 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6289 else
6290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6291 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6292
6293 IEM_MC_COMMIT_EFLAGS(EFlags);
6294 IEM_MC_ADVANCE_RIP();
6295 IEM_MC_END();
6296 return VINF_SUCCESS;
6297
6298 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6299 }
6300 }
6301}
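
/**
 * @remarks For the memory forms the bit offset in Gv is a signed quantity
 *          that may address bits outside the operand at GCPtrEffDst, which
 *          is what the SAR+SHL address adjustment above implements.  The
 *          32-bit case as plain C, for reference only (hypothetical helper;
 *          assumes arithmetic right shift for negative offsets, matching SAR):
 * @code
 *      static void iemBtCalcMemBitModel32(RTGCPTR *pGCPtrEff, int32_t iBitOffset, uint32_t *puBitNo)
 *      {
 *          *pGCPtrEff += (int64_t)(iBitOffset >> 5) * 4; // signed dword index * 4 bytes
 *          *puBitNo    = (uint32_t)iBitOffset & 0x1f;    // bit number within that dword
 *      }
 * @endcode
 */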
6302
6303
6304/** Opcode 0x0f 0xa3. */
6305FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6306{
6307 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6308 IEMOP_HLP_MIN_386();
6309 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6310}
6311
6312
6313/**
6314 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6315 */
6316FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6317{
6318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6319 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6320
6321 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6322 {
6323 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6325
6326 switch (pVCpu->iem.s.enmEffOpSize)
6327 {
6328 case IEMMODE_16BIT:
6329 IEM_MC_BEGIN(4, 0);
6330 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6331 IEM_MC_ARG(uint16_t, u16Src, 1);
6332 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6333 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6334
6335 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6336 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6337 IEM_MC_REF_EFLAGS(pEFlags);
6338 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6339
6340 IEM_MC_ADVANCE_RIP();
6341 IEM_MC_END();
6342 return VINF_SUCCESS;
6343
6344 case IEMMODE_32BIT:
6345 IEM_MC_BEGIN(4, 0);
6346 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6347 IEM_MC_ARG(uint32_t, u32Src, 1);
6348 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6349 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6350
6351 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6352 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6353 IEM_MC_REF_EFLAGS(pEFlags);
6354 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6355
6356 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6357 IEM_MC_ADVANCE_RIP();
6358 IEM_MC_END();
6359 return VINF_SUCCESS;
6360
6361 case IEMMODE_64BIT:
6362 IEM_MC_BEGIN(4, 0);
6363 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6364 IEM_MC_ARG(uint64_t, u64Src, 1);
6365 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6366 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6367
6368 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6369 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6370 IEM_MC_REF_EFLAGS(pEFlags);
6371 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6372
6373 IEM_MC_ADVANCE_RIP();
6374 IEM_MC_END();
6375 return VINF_SUCCESS;
6376
6377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6378 }
6379 }
6380 else
6381 {
6382 switch (pVCpu->iem.s.enmEffOpSize)
6383 {
6384 case IEMMODE_16BIT:
6385 IEM_MC_BEGIN(4, 2);
6386 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6387 IEM_MC_ARG(uint16_t, u16Src, 1);
6388 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6389 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6391
6392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6393 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6394 IEM_MC_ASSIGN(cShiftArg, cShift);
6395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6396 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6397 IEM_MC_FETCH_EFLAGS(EFlags);
6398 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6399 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6400
6401 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6402 IEM_MC_COMMIT_EFLAGS(EFlags);
6403 IEM_MC_ADVANCE_RIP();
6404 IEM_MC_END();
6405 return VINF_SUCCESS;
6406
6407 case IEMMODE_32BIT:
6408 IEM_MC_BEGIN(4, 2);
6409 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6410 IEM_MC_ARG(uint32_t, u32Src, 1);
6411 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6412 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6414
6415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6416 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6417 IEM_MC_ASSIGN(cShiftArg, cShift);
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6420 IEM_MC_FETCH_EFLAGS(EFlags);
6421 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6422 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6423
6424 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6425 IEM_MC_COMMIT_EFLAGS(EFlags);
6426 IEM_MC_ADVANCE_RIP();
6427 IEM_MC_END();
6428 return VINF_SUCCESS;
6429
6430 case IEMMODE_64BIT:
6431 IEM_MC_BEGIN(4, 2);
6432 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6433 IEM_MC_ARG(uint64_t, u64Src, 1);
6434 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6435 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6437
6438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6439 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6440 IEM_MC_ASSIGN(cShiftArg, cShift);
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6442 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6443 IEM_MC_FETCH_EFLAGS(EFlags);
6444 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6445 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6446
6447 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6448 IEM_MC_COMMIT_EFLAGS(EFlags);
6449 IEM_MC_ADVANCE_RIP();
6450 IEM_MC_END();
6451 return VINF_SUCCESS;
6452
6453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6454 }
6455 }
6456}
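
/**
 * @remarks C model of what the 32-bit SHLD worker (pfnNormalU32) computes;
 *          the assembly helper also updates EFLAGS, which is omitted here.
 *          Reference sketch only, with the count reduced modulo 32 the way
 *          the CPU masks it:
 * @code
 *      static uint32_t iemShld32Model(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *      {
 *          cShift &= 31;
 *          if (!cShift)
 *              return uDst;                                   // count 0: destination unchanged
 *          return (uDst << cShift) | (uSrc >> (32 - cShift)); // vacated bits filled from uSrc
 *      }
 * @endcode
 *          SHRD is the mirror image, shifting uDst right and filling the
 *          vacated high bits from uSrc.
 */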
6457
6458
6459/**
6460 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6461 */
6462FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6463{
6464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6465 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6466
6467 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6468 {
6469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6470
6471 switch (pVCpu->iem.s.enmEffOpSize)
6472 {
6473 case IEMMODE_16BIT:
6474 IEM_MC_BEGIN(4, 0);
6475 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6476 IEM_MC_ARG(uint16_t, u16Src, 1);
6477 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6478 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6479
6480 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6481 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6482 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6483 IEM_MC_REF_EFLAGS(pEFlags);
6484 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6485
6486 IEM_MC_ADVANCE_RIP();
6487 IEM_MC_END();
6488 return VINF_SUCCESS;
6489
6490 case IEMMODE_32BIT:
6491 IEM_MC_BEGIN(4, 0);
6492 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6493 IEM_MC_ARG(uint32_t, u32Src, 1);
6494 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6495 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6496
6497 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6498 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6499 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6500 IEM_MC_REF_EFLAGS(pEFlags);
6501 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6502
6503 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6504 IEM_MC_ADVANCE_RIP();
6505 IEM_MC_END();
6506 return VINF_SUCCESS;
6507
6508 case IEMMODE_64BIT:
6509 IEM_MC_BEGIN(4, 0);
6510 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6511 IEM_MC_ARG(uint64_t, u64Src, 1);
6512 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6513 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6514
6515 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6516 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6517 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6518 IEM_MC_REF_EFLAGS(pEFlags);
6519 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6520
6521 IEM_MC_ADVANCE_RIP();
6522 IEM_MC_END();
6523 return VINF_SUCCESS;
6524
6525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6526 }
6527 }
6528 else
6529 {
6530 switch (pVCpu->iem.s.enmEffOpSize)
6531 {
6532 case IEMMODE_16BIT:
6533 IEM_MC_BEGIN(4, 2);
6534 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6535 IEM_MC_ARG(uint16_t, u16Src, 1);
6536 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6537 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6542 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6543 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6544 IEM_MC_FETCH_EFLAGS(EFlags);
6545 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6546 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6547
6548 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6549 IEM_MC_COMMIT_EFLAGS(EFlags);
6550 IEM_MC_ADVANCE_RIP();
6551 IEM_MC_END();
6552 return VINF_SUCCESS;
6553
6554 case IEMMODE_32BIT:
6555 IEM_MC_BEGIN(4, 2);
6556 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6557 IEM_MC_ARG(uint32_t, u32Src, 1);
6558 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6559 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6561
6562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6564 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6565 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6566 IEM_MC_FETCH_EFLAGS(EFlags);
6567 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6568 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6569
6570 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6571 IEM_MC_COMMIT_EFLAGS(EFlags);
6572 IEM_MC_ADVANCE_RIP();
6573 IEM_MC_END();
6574 return VINF_SUCCESS;
6575
6576 case IEMMODE_64BIT:
6577 IEM_MC_BEGIN(4, 2);
6578 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6579 IEM_MC_ARG(uint64_t, u64Src, 1);
6580 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6581 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6583
6584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6586 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6587 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6588 IEM_MC_FETCH_EFLAGS(EFlags);
6589 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6590 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6591
6592 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6593 IEM_MC_COMMIT_EFLAGS(EFlags);
6594 IEM_MC_ADVANCE_RIP();
6595 IEM_MC_END();
6596 return VINF_SUCCESS;
6597
6598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6599 }
6600 }
6601}
6602
6603
6604
6605/** Opcode 0x0f 0xa4. */
6606FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6607{
6608 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6609 IEMOP_HLP_MIN_386();
6610 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6611}
6612
6613
6614/** Opcode 0x0f 0xa5. */
6615FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6616{
6617 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6618 IEMOP_HLP_MIN_386();
6619 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6620}
6621
6622
6623/** Opcode 0x0f 0xa8. */
6624FNIEMOP_DEF(iemOp_push_gs)
6625{
6626 IEMOP_MNEMONIC(push_gs, "push gs");
6627 IEMOP_HLP_MIN_386();
6628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6629 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6630}
6631
6632
6633/** Opcode 0x0f 0xa9. */
6634FNIEMOP_DEF(iemOp_pop_gs)
6635{
6636 IEMOP_MNEMONIC(pop_gs, "pop gs");
6637 IEMOP_HLP_MIN_386();
6638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6639 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6640}
6641
6642
6643/** Opcode 0x0f 0xaa. */
6644FNIEMOP_DEF(iemOp_rsm)
6645{
6646 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6647 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6649 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6650}
6651
6652
6653
6654/** Opcode 0x0f 0xab. */
6655FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6656{
6657 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6658 IEMOP_HLP_MIN_386();
6659 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6660}
6661
6662
6663/** Opcode 0x0f 0xac. */
6664FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6665{
6666 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6667 IEMOP_HLP_MIN_386();
6668 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6669}
6670
6671
6672/** Opcode 0x0f 0xad. */
6673FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6674{
6675 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6676 IEMOP_HLP_MIN_386();
6677 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6678}
6679
6680
6681/** Opcode 0x0f 0xae mem/0. */
6682FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6683{
6684 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6685 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6686 return IEMOP_RAISE_INVALID_OPCODE();
6687
6688 IEM_MC_BEGIN(3, 1);
6689 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6690 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6691 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6694 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6695 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6696 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6697 IEM_MC_END();
6698 return VINF_SUCCESS;
6699}
6700
6701
6702/** Opcode 0x0f 0xae mem/1. */
6703FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6704{
6705 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6706 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6707 return IEMOP_RAISE_INVALID_OPCODE();
6708
6709 IEM_MC_BEGIN(3, 1);
6710 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6711 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6712 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6715 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6716 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6717 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6718 IEM_MC_END();
6719 return VINF_SUCCESS;
6720}
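
/**
 * @remarks The 512-byte FXSAVE/FXRSTOR image must be 16-byte aligned or the
 *          instruction raises #GP(0); VBox describes its layout with the
 *          X86FXSTATE structure.  Trivial alignment check, for reference:
 * @code
 *      static bool iemIsFxSaveAreaAlignedModel(RTGCPTR GCPtrEff)
 *      {
 *          return !(GCPtrEff & 15); // 16-byte aligned?
 *      }
 * @endcode
 */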
6721
6722
6723/**
6724 * @opmaps grp15
6725 * @opcode !11/2
6726 * @oppfx none
6727 * @opcpuid sse
6728 * @opgroup og_sse_mxcsrsm
6729 * @opxcpttype 5
6730 * @optest op1=0 -> mxcsr=0
6731 * @optest op1=0x2083 -> mxcsr=0x2083
6732 * @optest op1=0xfffffffe -> value.xcpt=0xd
6733 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6734 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6735 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6736 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6737 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6738 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6739 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6740 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6741 */
6742FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6743{
6744 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6745 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6746 return IEMOP_RAISE_INVALID_OPCODE();
6747
6748 IEM_MC_BEGIN(2, 0);
6749 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6750 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6753 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR. */
6754 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6755 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6756 IEM_MC_END();
6757 return VINF_SUCCESS;
6758}
6759
6760
6761/**
6762 * @opmaps grp15
6763 * @opcode !11/3
6764 * @oppfx none
6765 * @opcpuid sse
6766 * @opgroup og_sse_mxcsrsm
6767 * @opxcpttype 5
6768 * @optest mxcsr=0 -> op1=0
6769 * @optest mxcsr=0x2083 -> op1=0x2083
6770 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6771 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6772 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6773 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6774 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6775 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6776 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6777 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6778 */
6779FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6780{
6781 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6782 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6783 return IEMOP_RAISE_INVALID_OPCODE();
6784
6785 IEM_MC_BEGIN(2, 0);
6786 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6787 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6790 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6791 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6792 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6793 IEM_MC_END();
6794 return VINF_SUCCESS;
6795}
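
/**
 * @remarks Guest code typically pairs STMXCSR/LDMXCSR as a read-modify-write
 *          of MXCSR.  On the host side the same pair is reachable through
 *          the standard SSE intrinsics; illustrative only, unrelated to the
 *          IEM implementation above:
 * @code
 *      #include <xmmintrin.h>
 *      static void setDazModel(void)
 *      {
 *          unsigned fMxcsr = _mm_getcsr();   // stmxcsr
 *          _mm_setcsr(fMxcsr | 0x0040);      // ldmxcsr with DAZ (bit 6) set
 *      }
 * @endcode
 */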
6796
6797
6798/**
6799 * @opmaps grp15
6800 * @opcode !11/4
6801 * @oppfx none
6802 * @opcpuid xsave
6803 * @opgroup og_system
6804 * @opxcpttype none
6805 */
6806FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6807{
6808 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6809 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6810 return IEMOP_RAISE_INVALID_OPCODE();
6811
6812 IEM_MC_BEGIN(3, 0);
6813 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6814 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6815 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6819 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6820 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6821 IEM_MC_END();
6822 return VINF_SUCCESS;
6823}
6824
6825
6826/**
6827 * @opmaps grp15
6828 * @opcode !11/5
6829 * @oppfx none
6830 * @opcpuid xsave
6831 * @opgroup og_system
6832 * @opxcpttype none
6833 */
6834FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6835{
6836 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6837 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6838 return IEMOP_RAISE_INVALID_OPCODE();
6839
6840 IEM_MC_BEGIN(3, 0);
6841 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6842 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6843 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6846 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor loads the state, cf. fxrstor above. */
6847 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6848 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6849 IEM_MC_END();
6850 return VINF_SUCCESS;
6851}
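
/**
 * @remarks Unlike FXSAVE, the XSAVE area must be 64-byte aligned, and
 *          EDX:EAX supplies the requested-feature bitmap which the CPU ANDs
 *          with XCR0.  Sketch of that mask computation (hypothetical helper,
 *          reference only):
 * @code
 *      static uint64_t iemXsaveRfbmModel(uint32_t uEax, uint32_t uEdx, uint64_t fXcr0)
 *      {
 *          return RT_MAKE_U64(uEax, uEdx) & fXcr0; // RFBM = (EDX:EAX) & XCR0
 *      }
 * @endcode
 */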
6852
6853/** Opcode 0x0f 0xae mem/6. */
6854FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6855
6856/**
6857 * @opmaps grp15
6858 * @opcode !11/7
6859 * @oppfx none
6860 * @opcpuid clfsh
6861 * @opgroup og_cachectl
6862 * @optest op1=1 ->
6863 */
6864FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6865{
6866 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6867 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6868 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6869
6870 IEM_MC_BEGIN(2, 0);
6871 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6872 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6875 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6876 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6877 IEM_MC_END();
6878 return VINF_SUCCESS;
6879}
6880
6881/**
6882 * @opmaps grp15
6883 * @opcode !11/7
6884 * @oppfx 0x66
6885 * @opcpuid clflushopt
6886 * @opgroup og_cachectl
6887 * @optest op1=1 ->
6888 */
6889FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6890{
6891 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6892 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6893 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6894
6895 IEM_MC_BEGIN(2, 0);
6896 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6897 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6900 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6901 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6902 IEM_MC_END();
6903 return VINF_SUCCESS;
6904}
6905
6906
6907/** Opcode 0x0f 0xae 11b/5. */
6908FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6909{
6910 RT_NOREF_PV(bRm);
6911 IEMOP_MNEMONIC(lfence, "lfence");
6912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6913 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6914 return IEMOP_RAISE_INVALID_OPCODE();
6915
6916 IEM_MC_BEGIN(0, 0);
6917 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6918 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6919 else
6920 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6921 IEM_MC_ADVANCE_RIP();
6922 IEM_MC_END();
6923 return VINF_SUCCESS;
6924}
6925
6926
6927/** Opcode 0x0f 0xae 11b/6. */
6928FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6929{
6930 RT_NOREF_PV(bRm);
6931 IEMOP_MNEMONIC(mfence, "mfence");
6932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6933 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6934 return IEMOP_RAISE_INVALID_OPCODE();
6935
6936 IEM_MC_BEGIN(0, 0);
6937 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6938 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6939 else
6940 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6941 IEM_MC_ADVANCE_RIP();
6942 IEM_MC_END();
6943 return VINF_SUCCESS;
6944}
6945
6946
6947/** Opcode 0x0f 0xae 11b/7. */
6948FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6949{
6950 RT_NOREF_PV(bRm);
6951 IEMOP_MNEMONIC(sfence, "sfence");
6952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6953 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6954 return IEMOP_RAISE_INVALID_OPCODE();
6955
6956 IEM_MC_BEGIN(0, 0);
6957 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6958 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6959 else
6960 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6961 IEM_MC_ADVANCE_RIP();
6962 IEM_MC_END();
6963 return VINF_SUCCESS;
6964}
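
/**
 * @remarks On hosts without SSE2 all three fences above fall back to
 *          iemAImpl_alt_mem_fence, a locked operation with full-fence
 *          semantics.  In portable C the closest analogue of MFENCE is a
 *          sequentially consistent fence (illustrative only):
 * @code
 *      #include <stdatomic.h>
 *      static void fullFenceModel(void)
 *      {
 *          atomic_thread_fence(memory_order_seq_cst); // mfence or a lock'ed op on x86
 *      }
 * @endcode
 */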
6965
6966
6967/** Opcode 0xf3 0x0f 0xae 11b/0. */
6968FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
6969{
6970 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
6971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6972 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
6973 {
6974 IEM_MC_BEGIN(1, 0);
6975 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6976 IEM_MC_ARG(uint64_t, u64Dst, 0);
6977 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
6978 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
6979 IEM_MC_ADVANCE_RIP();
6980 IEM_MC_END();
6981 }
6982 else
6983 {
6984 IEM_MC_BEGIN(1, 0);
6985 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
6986 IEM_MC_ARG(uint32_t, u32Dst, 0);
6987 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
6988 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
6989 IEM_MC_ADVANCE_RIP();
6990 IEM_MC_END();
6991 }
6992 return VINF_SUCCESS;
6993}
6994
6995
6996/** Opcode 0xf3 0x0f 0xae 11b/1. */
6997FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
6998{
6999 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7001 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7002 {
7003 IEM_MC_BEGIN(1, 0);
7004 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7005 IEM_MC_ARG(uint64_t, u64Dst, 0);
7006 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7007 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7008 IEM_MC_ADVANCE_RIP();
7009 IEM_MC_END();
7010 }
7011 else
7012 {
7013 IEM_MC_BEGIN(1, 0);
7014 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7015 IEM_MC_ARG(uint32_t, u32Dst, 0);
7016 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7017 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7018 IEM_MC_ADVANCE_RIP();
7019 IEM_MC_END();
7020 }
7021 return VINF_SUCCESS;
7022}
7023
7024
7025/** Opcode 0xf3 0x0f 0xae 11b/2. */
7026FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7027{
7028 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7031 {
7032 IEM_MC_BEGIN(1, 0);
7033 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7034 IEM_MC_ARG(uint64_t, u64Dst, 0);
7035 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7036 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7037 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7038 IEM_MC_ADVANCE_RIP();
7039 IEM_MC_END();
7040 }
7041 else
7042 {
7043 IEM_MC_BEGIN(1, 0);
7044 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7045 IEM_MC_ARG(uint32_t, u32Dst, 0);
7046 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7047 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7048 IEM_MC_ADVANCE_RIP();
7049 IEM_MC_END();
7050 }
7051 return VINF_SUCCESS;
7052}
7053
7054
7055/** Opcode 0xf3 0x0f 0xae 11b/3. */
7056FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7057{
7058 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7061 {
7062 IEM_MC_BEGIN(1, 0);
7063 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7064 IEM_MC_ARG(uint64_t, u64Dst, 0);
7065 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7066 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7067 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7068 IEM_MC_ADVANCE_RIP();
7069 IEM_MC_END();
7070 }
7071 else
7072 {
7073 IEM_MC_BEGIN(1, 0);
7074 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7075 IEM_MC_ARG(uint32_t, u32Dst, 0);
7076 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7077 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7078 IEM_MC_ADVANCE_RIP();
7079 IEM_MC_END();
7080 }
7081 return VINF_SUCCESS;
7082}
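
/**
 * @remarks The four FSGSBASE instructions above are 64-bit mode only and
 *          gated on CR4.FSGSBASE, hence IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT.
 *          Host compilers expose them directly; illustrative use, assuming a
 *          compiler targeting FSGSBASE-capable hardware (e.g. -mfsgsbase):
 * @code
 *      #include <immintrin.h>
 *      static unsigned long long readFsBaseModel(void)
 *      {
 *          return _readfsbase_u64(); // rdfsbase
 *      }
 * @endcode
 */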
7083
7084
7085/**
7086 * Group 15 jump table for register variant.
7087 */
7088IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7089{ /* pfx: none, 066h, 0f3h, 0f2h */
7090 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7091 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7092 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7093 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7094 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7095 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7096 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7097 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7098};
7099AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7100
7101
7102/**
7103 * Group 15 jump table for memory variant.
7104 */
7105IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7106{ /* pfx: none, 066h, 0f3h, 0f2h */
7107 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7108 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7109 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7110 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7111 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7112 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7113 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7114 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7115};
7116AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7117
7118
7119/** Opcode 0x0f 0xae. */
7120FNIEMOP_DEF(iemOp_Grp15)
7121{
7122 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7124 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7125 /* register, register */
7126 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7127 + pVCpu->iem.s.idxPrefix], bRm);
7128 /* memory, register */
7129 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7130 + pVCpu->iem.s.idxPrefix], bRm);
7131}
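
/**
 * @remarks The Grp15 tables are indexed as /reg * 4 + prefix, where prefix
 *          is idxPrefix: 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2.  Worked
 *          example: F3 0F AE /2 with a register operand selects
 *          g_apfnGroup15RegReg[2 * 4 + 2], i.e. iemOp_Grp15_wrfsbase.
 */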
7132
7133
7134/** Opcode 0x0f 0xaf. */
7135FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7136{
7137 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7138 IEMOP_HLP_MIN_386();
7139 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7140 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7141}
7142
7143
7144/** Opcode 0x0f 0xb0. */
7145FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7146{
7147 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7148 IEMOP_HLP_MIN_486();
7149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7150
7151 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7152 {
7153 IEMOP_HLP_DONE_DECODING();
7154 IEM_MC_BEGIN(4, 0);
7155 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7156 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7157 IEM_MC_ARG(uint8_t, u8Src, 2);
7158 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7159
7160 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7161 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7162 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7163 IEM_MC_REF_EFLAGS(pEFlags);
7164 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7165 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7166 else
7167 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7168
7169 IEM_MC_ADVANCE_RIP();
7170 IEM_MC_END();
7171 }
7172 else
7173 {
7174 IEM_MC_BEGIN(4, 3);
7175 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7176 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7177 IEM_MC_ARG(uint8_t, u8Src, 2);
7178 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7180 IEM_MC_LOCAL(uint8_t, u8Al);
7181
7182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7183 IEMOP_HLP_DONE_DECODING();
7184 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7185 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7186 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7187 IEM_MC_FETCH_EFLAGS(EFlags);
7188 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7189 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7190 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7191 else
7192 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7193
7194 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7195 IEM_MC_COMMIT_EFLAGS(EFlags);
7196 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7197 IEM_MC_ADVANCE_RIP();
7198 IEM_MC_END();
7199 }
7200 return VINF_SUCCESS;
7201}
7202
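/*
 * Behavioural sketch (not compiled) of the byte cmpxchg helper called above:
 * compare AL with the destination; on match store the source and set ZF,
 * otherwise clear ZF and load AL from the destination. The real helpers also
 * compute the remaining arithmetic flags from the implicit compare, which is
 * elided here. The function name is made up for illustration.
 */
#if 0
static void iemCmpXchgU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Dst == *pu8Al)
    {
        *pu8Dst    = u8Src;           /* match: store source operand */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *pu8Al     = *pu8Dst;         /* mismatch: load accumulator */
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif
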
7203/** Opcode 0x0f 0xb1. */
7204FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7205{
7206 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7207 IEMOP_HLP_MIN_486();
7208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7209
7210 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7211 {
7212 IEMOP_HLP_DONE_DECODING();
7213 switch (pVCpu->iem.s.enmEffOpSize)
7214 {
7215 case IEMMODE_16BIT:
7216 IEM_MC_BEGIN(4, 0);
7217 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7218 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7219 IEM_MC_ARG(uint16_t, u16Src, 2);
7220 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7221
7222 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7223 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7224 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7225 IEM_MC_REF_EFLAGS(pEFlags);
7226 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7227 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7228 else
7229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7230
7231 IEM_MC_ADVANCE_RIP();
7232 IEM_MC_END();
7233 return VINF_SUCCESS;
7234
7235 case IEMMODE_32BIT:
7236 IEM_MC_BEGIN(4, 0);
7237 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7238 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7239 IEM_MC_ARG(uint32_t, u32Src, 2);
7240 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7241
7242 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7243 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7244 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7245 IEM_MC_REF_EFLAGS(pEFlags);
7246 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7247 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7248 else
7249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7250
7251 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7252 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7253 IEM_MC_ADVANCE_RIP();
7254 IEM_MC_END();
7255 return VINF_SUCCESS;
7256
7257 case IEMMODE_64BIT:
7258 IEM_MC_BEGIN(4, 0);
7259 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7260 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7261#ifdef RT_ARCH_X86
7262 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7263#else
7264 IEM_MC_ARG(uint64_t, u64Src, 2);
7265#endif
7266 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7267
7268 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7269 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7270 IEM_MC_REF_EFLAGS(pEFlags);
7271#ifdef RT_ARCH_X86
7272 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7273 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7275 else
7276 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7277#else
7278 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7279 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7280 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7281 else
7282 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7283#endif
7284
7285 IEM_MC_ADVANCE_RIP();
7286 IEM_MC_END();
7287 return VINF_SUCCESS;
7288
7289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7290 }
7291 }
7292 else
7293 {
7294 switch (pVCpu->iem.s.enmEffOpSize)
7295 {
7296 case IEMMODE_16BIT:
7297 IEM_MC_BEGIN(4, 3);
7298 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7299 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7300 IEM_MC_ARG(uint16_t, u16Src, 2);
7301 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7303 IEM_MC_LOCAL(uint16_t, u16Ax);
7304
7305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7306 IEMOP_HLP_DONE_DECODING();
7307 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7308 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7309 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7310 IEM_MC_FETCH_EFLAGS(EFlags);
7311 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7312 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7313 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7314 else
7315 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7316
7317 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7318 IEM_MC_COMMIT_EFLAGS(EFlags);
7319 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7320 IEM_MC_ADVANCE_RIP();
7321 IEM_MC_END();
7322 return VINF_SUCCESS;
7323
7324 case IEMMODE_32BIT:
7325 IEM_MC_BEGIN(4, 3);
7326 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7327 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7328 IEM_MC_ARG(uint32_t, u32Src, 2);
7329 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7331 IEM_MC_LOCAL(uint32_t, u32Eax);
7332
7333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7334 IEMOP_HLP_DONE_DECODING();
7335 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7336 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7337 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7338 IEM_MC_FETCH_EFLAGS(EFlags);
7339 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7340 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7341 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7342 else
7343 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7344
7345 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7346 IEM_MC_COMMIT_EFLAGS(EFlags);
7347 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7348 IEM_MC_ADVANCE_RIP();
7349 IEM_MC_END();
7350 return VINF_SUCCESS;
7351
7352 case IEMMODE_64BIT:
7353 IEM_MC_BEGIN(4, 3);
7354 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7355 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7356#ifdef RT_ARCH_X86
7357 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7358#else
7359 IEM_MC_ARG(uint64_t, u64Src, 2);
7360#endif
7361 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7363 IEM_MC_LOCAL(uint64_t, u64Rax);
7364
7365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7366 IEMOP_HLP_DONE_DECODING();
7367 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7368 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7369 IEM_MC_FETCH_EFLAGS(EFlags);
7370 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7371#ifdef RT_ARCH_X86
7372 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7373 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7374 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7375 else
7376 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7377#else
7378 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7379 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7380 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7381 else
7382 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7383#endif
7384
7385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7386 IEM_MC_COMMIT_EFLAGS(EFlags);
7387 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7388 IEM_MC_ADVANCE_RIP();
7389 IEM_MC_END();
7390 return VINF_SUCCESS;
7391
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394 }
7395}
7396
7397
7398FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7399{
7400 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7401 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7402
7403 switch (pVCpu->iem.s.enmEffOpSize)
7404 {
7405 case IEMMODE_16BIT:
7406 IEM_MC_BEGIN(5, 1);
7407 IEM_MC_ARG(uint16_t, uSel, 0);
7408 IEM_MC_ARG(uint16_t, offSeg, 1);
7409 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7410 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7411 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7412 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7415 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7416 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7417 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7418 IEM_MC_END();
7419 return VINF_SUCCESS;
7420
7421 case IEMMODE_32BIT:
7422 IEM_MC_BEGIN(5, 1);
7423 IEM_MC_ARG(uint16_t, uSel, 0);
7424 IEM_MC_ARG(uint32_t, offSeg, 1);
7425 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7426 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7427 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7428 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7431 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7432 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7433 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7434 IEM_MC_END();
7435 return VINF_SUCCESS;
7436
7437 case IEMMODE_64BIT:
7438 IEM_MC_BEGIN(5, 1);
7439 IEM_MC_ARG(uint16_t, uSel, 0);
7440 IEM_MC_ARG(uint64_t, offSeg, 1);
7441 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7442 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7443 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7444 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7447 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
7448 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7449 else
7450 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7451 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7452 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7453 IEM_MC_END();
7454 return VINF_SUCCESS;
7455
7456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7457 }
7458}
7459
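/*
 * Memory layout sketch (not compiled) for the far pointer fetched by
 * iemOpCommonLoadSRegAndGreg above: the offset comes first at the effective
 * address, followed by the 16-bit selector. For the 32-bit operand size the
 * operand therefore looks like this (hypothetical type, packed):
 */
#if 0
#pragma pack(1)
typedef struct FARPTR32SKETCH
{
    uint32_t off;   /* fetched into offSeg via IEM_MC_FETCH_MEM_U32 */
    uint16_t sel;   /* fetched into uSel via IEM_MC_FETCH_MEM_U16_DISP at +4 */
} FARPTR32SKETCH;
#pragma pack()
#endif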
7460
7461/** Opcode 0x0f 0xb2. */
7462FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7463{
7464 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7465 IEMOP_HLP_MIN_386();
7466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7467 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7468 return IEMOP_RAISE_INVALID_OPCODE();
7469 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7470}
7471
7472
7473/** Opcode 0x0f 0xb3. */
7474FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7475{
7476 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7477 IEMOP_HLP_MIN_386();
7478 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7479}
7480
7481
7482/** Opcode 0x0f 0xb4. */
7483FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7484{
7485 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7486 IEMOP_HLP_MIN_386();
7487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7488 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7489 return IEMOP_RAISE_INVALID_OPCODE();
7490 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7491}
7492
7493
7494/** Opcode 0x0f 0xb5. */
7495FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7496{
7497 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7498 IEMOP_HLP_MIN_386();
7499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7500 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7501 return IEMOP_RAISE_INVALID_OPCODE();
7502 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7503}
7504
7505
7506/** Opcode 0x0f 0xb6. */
7507FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7508{
7509 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7510 IEMOP_HLP_MIN_386();
7511
7512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7513
7514 /*
7515 * If rm is denoting a register, no more instruction bytes.
7516 */
7517 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7518 {
7519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7520 switch (pVCpu->iem.s.enmEffOpSize)
7521 {
7522 case IEMMODE_16BIT:
7523 IEM_MC_BEGIN(0, 1);
7524 IEM_MC_LOCAL(uint16_t, u16Value);
7525 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7526 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7527 IEM_MC_ADVANCE_RIP();
7528 IEM_MC_END();
7529 return VINF_SUCCESS;
7530
7531 case IEMMODE_32BIT:
7532 IEM_MC_BEGIN(0, 1);
7533 IEM_MC_LOCAL(uint32_t, u32Value);
7534 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7535 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7536 IEM_MC_ADVANCE_RIP();
7537 IEM_MC_END();
7538 return VINF_SUCCESS;
7539
7540 case IEMMODE_64BIT:
7541 IEM_MC_BEGIN(0, 1);
7542 IEM_MC_LOCAL(uint64_t, u64Value);
7543 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7544 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7545 IEM_MC_ADVANCE_RIP();
7546 IEM_MC_END();
7547 return VINF_SUCCESS;
7548
7549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7550 }
7551 }
7552 else
7553 {
7554 /*
7555 * We're loading a register from memory.
7556 */
7557 switch (pVCpu->iem.s.enmEffOpSize)
7558 {
7559 case IEMMODE_16BIT:
7560 IEM_MC_BEGIN(0, 2);
7561 IEM_MC_LOCAL(uint16_t, u16Value);
7562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7565 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7566 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7567 IEM_MC_ADVANCE_RIP();
7568 IEM_MC_END();
7569 return VINF_SUCCESS;
7570
7571 case IEMMODE_32BIT:
7572 IEM_MC_BEGIN(0, 2);
7573 IEM_MC_LOCAL(uint32_t, u32Value);
7574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7577 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7578 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7579 IEM_MC_ADVANCE_RIP();
7580 IEM_MC_END();
7581 return VINF_SUCCESS;
7582
7583 case IEMMODE_64BIT:
7584 IEM_MC_BEGIN(0, 2);
7585 IEM_MC_LOCAL(uint64_t, u64Value);
7586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7589 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7590 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7591 IEM_MC_ADVANCE_RIP();
7592 IEM_MC_END();
7593 return VINF_SUCCESS;
7594
7595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7596 }
7597 }
7598}
7599
7600
7601/** Opcode 0x0f 0xb7. */
7602FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7603{
7604 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7605 IEMOP_HLP_MIN_386();
7606
7607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7608
7609 /** @todo Not entirely sure how the operand size prefix is handled here,
7610 * assuming that it will be ignored. Would be nice to have a few
7611 * tests for this. */
7612 /*
7613 * If rm is denoting a register, no more instruction bytes.
7614 */
7615 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7616 {
7617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7618 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7619 {
7620 IEM_MC_BEGIN(0, 1);
7621 IEM_MC_LOCAL(uint32_t, u32Value);
7622 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7623 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7624 IEM_MC_ADVANCE_RIP();
7625 IEM_MC_END();
7626 }
7627 else
7628 {
7629 IEM_MC_BEGIN(0, 1);
7630 IEM_MC_LOCAL(uint64_t, u64Value);
7631 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7632 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7633 IEM_MC_ADVANCE_RIP();
7634 IEM_MC_END();
7635 }
7636 }
7637 else
7638 {
7639 /*
7640 * We're loading a register from memory.
7641 */
7642 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7643 {
7644 IEM_MC_BEGIN(0, 2);
7645 IEM_MC_LOCAL(uint32_t, u32Value);
7646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7649 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7650 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7651 IEM_MC_ADVANCE_RIP();
7652 IEM_MC_END();
7653 }
7654 else
7655 {
7656 IEM_MC_BEGIN(0, 2);
7657 IEM_MC_LOCAL(uint64_t, u64Value);
7658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7661 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7662 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7663 IEM_MC_ADVANCE_RIP();
7664 IEM_MC_END();
7665 }
7666 }
7667 return VINF_SUCCESS;
7668}
7669
7670
7671/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7672FNIEMOP_UD_STUB(iemOp_jmpe);
7673/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7674FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7675
7676
7677/**
7678 * @opcode 0xb9
7679 * @opinvalid intel-modrm
7680 * @optest ->
7681 */
7682FNIEMOP_DEF(iemOp_Grp10)
7683{
7684 /*
7685 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
7686 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7687 */
7688 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7689 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7690 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7691}
7692
7693
7694/** Opcode 0x0f 0xba. */
7695FNIEMOP_DEF(iemOp_Grp8)
7696{
7697 IEMOP_HLP_MIN_386();
7698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7699 PCIEMOPBINSIZES pImpl;
7700 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7701 {
7702 case 0: case 1: case 2: case 3:
7703 /* Both AMD and Intel want full modr/m decoding and imm8. */
7704 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7705 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7706 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7707 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7708 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7710 }
7711 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7712
7713 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7714 {
7715 /* register destination. */
7716 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7718
7719 switch (pVCpu->iem.s.enmEffOpSize)
7720 {
7721 case IEMMODE_16BIT:
7722 IEM_MC_BEGIN(3, 0);
7723 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7724 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7725 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7726
7727 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7728 IEM_MC_REF_EFLAGS(pEFlags);
7729 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7730
7731 IEM_MC_ADVANCE_RIP();
7732 IEM_MC_END();
7733 return VINF_SUCCESS;
7734
7735 case IEMMODE_32BIT:
7736 IEM_MC_BEGIN(3, 0);
7737 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7738 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7739 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7740
7741 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7742 IEM_MC_REF_EFLAGS(pEFlags);
7743 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7744
7745 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7746 IEM_MC_ADVANCE_RIP();
7747 IEM_MC_END();
7748 return VINF_SUCCESS;
7749
7750 case IEMMODE_64BIT:
7751 IEM_MC_BEGIN(3, 0);
7752 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7753 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7754 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7755
7756 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7757 IEM_MC_REF_EFLAGS(pEFlags);
7758 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7759
7760 IEM_MC_ADVANCE_RIP();
7761 IEM_MC_END();
7762 return VINF_SUCCESS;
7763
7764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7765 }
7766 }
7767 else
7768 {
7769 /* memory destination. */
7770
7771 uint32_t fAccess;
7772 if (pImpl->pfnLockedU16)
7773 fAccess = IEM_ACCESS_DATA_RW;
7774 else /* BT */
7775 fAccess = IEM_ACCESS_DATA_R;
7776
7777 /** @todo test negative bit offsets! */
7778 switch (pVCpu->iem.s.enmEffOpSize)
7779 {
7780 case IEMMODE_16BIT:
7781 IEM_MC_BEGIN(3, 1);
7782 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7783 IEM_MC_ARG(uint16_t, u16Src, 1);
7784 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7786
7787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7788 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7789 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7790 if (pImpl->pfnLockedU16)
7791 IEMOP_HLP_DONE_DECODING();
7792 else
7793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7794 IEM_MC_FETCH_EFLAGS(EFlags);
7795 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7796 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7797 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7798 else
7799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7800 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7801
7802 IEM_MC_COMMIT_EFLAGS(EFlags);
7803 IEM_MC_ADVANCE_RIP();
7804 IEM_MC_END();
7805 return VINF_SUCCESS;
7806
7807 case IEMMODE_32BIT:
7808 IEM_MC_BEGIN(3, 1);
7809 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7810 IEM_MC_ARG(uint32_t, u32Src, 1);
7811 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7813
7814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7815 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7816 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7817 if (pImpl->pfnLockedU16)
7818 IEMOP_HLP_DONE_DECODING();
7819 else
7820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7821 IEM_MC_FETCH_EFLAGS(EFlags);
7822 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7823 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7824 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7825 else
7826 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7827 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7828
7829 IEM_MC_COMMIT_EFLAGS(EFlags);
7830 IEM_MC_ADVANCE_RIP();
7831 IEM_MC_END();
7832 return VINF_SUCCESS;
7833
7834 case IEMMODE_64BIT:
7835 IEM_MC_BEGIN(3, 1);
7836 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7837 IEM_MC_ARG(uint64_t, u64Src, 1);
7838 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7840
7841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7842 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7843 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7844 if (pImpl->pfnLockedU16)
7845 IEMOP_HLP_DONE_DECODING();
7846 else
7847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7848 IEM_MC_FETCH_EFLAGS(EFlags);
7849 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7850 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7851 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7852 else
7853 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7854 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7855
7856 IEM_MC_COMMIT_EFLAGS(EFlags);
7857 IEM_MC_ADVANCE_RIP();
7858 IEM_MC_END();
7859 return VINF_SUCCESS;
7860
7861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7862 }
7863 }
7864}
7865
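/*
 * Sketch (not compiled) of the immediate masking done by the Ev,Ib forms in
 * iemOp_Grp8 above: the bit offset is truncated to the operand width (15, 31
 * or 63), so it always lands inside the operand at the effective address;
 * the Ev,Gv memory forms, by contrast, can address bits outside it. The
 * helper name is made up.
 */
#if 0
static unsigned iemBitImmOffsetSketch(uint8_t u8Bit, IEMMODE enmEffOpSize)
{
    switch (enmEffOpSize)
    {
        case IEMMODE_16BIT: return u8Bit & 0x0f;
        case IEMMODE_32BIT: return u8Bit & 0x1f;
        default:            return u8Bit & 0x3f; /* IEMMODE_64BIT */
    }
}
#endif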
7866
7867/** Opcode 0x0f 0xbb. */
7868FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7869{
7870 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7871 IEMOP_HLP_MIN_386();
7872 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7873}
7874
7875
7876/** Opcode 0x0f 0xbc. */
7877FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7878{
7879 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7880 IEMOP_HLP_MIN_386();
7881 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7882 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7883}
7884
7885
7886/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7887FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7888
7889
7890/** Opcode 0x0f 0xbd. */
7891FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7892{
7893 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7894 IEMOP_HLP_MIN_386();
7895 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7896 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7897}
7898
7899
7900/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7901FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7902
7903
7904/** Opcode 0x0f 0xbe. */
7905FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7906{
7907 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7908 IEMOP_HLP_MIN_386();
7909
7910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7911
7912 /*
7913 * If rm is denoting a register, no more instruction bytes.
7914 */
7915 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7916 {
7917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7918 switch (pVCpu->iem.s.enmEffOpSize)
7919 {
7920 case IEMMODE_16BIT:
7921 IEM_MC_BEGIN(0, 1);
7922 IEM_MC_LOCAL(uint16_t, u16Value);
7923 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7924 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7925 IEM_MC_ADVANCE_RIP();
7926 IEM_MC_END();
7927 return VINF_SUCCESS;
7928
7929 case IEMMODE_32BIT:
7930 IEM_MC_BEGIN(0, 1);
7931 IEM_MC_LOCAL(uint32_t, u32Value);
7932 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7933 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7934 IEM_MC_ADVANCE_RIP();
7935 IEM_MC_END();
7936 return VINF_SUCCESS;
7937
7938 case IEMMODE_64BIT:
7939 IEM_MC_BEGIN(0, 1);
7940 IEM_MC_LOCAL(uint64_t, u64Value);
7941 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7942 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7943 IEM_MC_ADVANCE_RIP();
7944 IEM_MC_END();
7945 return VINF_SUCCESS;
7946
7947 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7948 }
7949 }
7950 else
7951 {
7952 /*
7953 * We're loading a register from memory.
7954 */
7955 switch (pVCpu->iem.s.enmEffOpSize)
7956 {
7957 case IEMMODE_16BIT:
7958 IEM_MC_BEGIN(0, 2);
7959 IEM_MC_LOCAL(uint16_t, u16Value);
7960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7963 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7964 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7965 IEM_MC_ADVANCE_RIP();
7966 IEM_MC_END();
7967 return VINF_SUCCESS;
7968
7969 case IEMMODE_32BIT:
7970 IEM_MC_BEGIN(0, 2);
7971 IEM_MC_LOCAL(uint32_t, u32Value);
7972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7975 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7976 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7977 IEM_MC_ADVANCE_RIP();
7978 IEM_MC_END();
7979 return VINF_SUCCESS;
7980
7981 case IEMMODE_64BIT:
7982 IEM_MC_BEGIN(0, 2);
7983 IEM_MC_LOCAL(uint64_t, u64Value);
7984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7987 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7988 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7989 IEM_MC_ADVANCE_RIP();
7990 IEM_MC_END();
7991 return VINF_SUCCESS;
7992
7993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7994 }
7995 }
7996}
7997
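/*
 * Sketch (not compiled): the only difference between the movzx handlers
 * earlier and the movsx handlers above is the extension applied to the
 * fetched value, e.g. for a byte source (hypothetical helper):
 */
#if 0
static uint64_t iemExtendU8Sketch(uint8_t u8Value, bool fSignExtend)
{
    return fSignExtend
         ? (uint64_t)(int64_t)(int8_t)u8Value  /* movsx: replicate bit 7 */
         : (uint64_t)u8Value;                  /* movzx: zero the rest */
}
#endif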
7998
7999/** Opcode 0x0f 0xbf. */
8000FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8001{
8002 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8003 IEMOP_HLP_MIN_386();
8004
8005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8006
8007 /** @todo Not entirely sure how the operand size prefix is handled here,
8008 * assuming that it will be ignored. Would be nice to have a few
8009 * tests for this. */
8010 /*
8011 * If rm is denoting a register, no more instruction bytes.
8012 */
8013 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8014 {
8015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8016 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8017 {
8018 IEM_MC_BEGIN(0, 1);
8019 IEM_MC_LOCAL(uint32_t, u32Value);
8020 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8021 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8022 IEM_MC_ADVANCE_RIP();
8023 IEM_MC_END();
8024 }
8025 else
8026 {
8027 IEM_MC_BEGIN(0, 1);
8028 IEM_MC_LOCAL(uint64_t, u64Value);
8029 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8030 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8031 IEM_MC_ADVANCE_RIP();
8032 IEM_MC_END();
8033 }
8034 }
8035 else
8036 {
8037 /*
8038 * We're loading a register from memory.
8039 */
8040 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8041 {
8042 IEM_MC_BEGIN(0, 2);
8043 IEM_MC_LOCAL(uint32_t, u32Value);
8044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8047 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8048 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8049 IEM_MC_ADVANCE_RIP();
8050 IEM_MC_END();
8051 }
8052 else
8053 {
8054 IEM_MC_BEGIN(0, 2);
8055 IEM_MC_LOCAL(uint64_t, u64Value);
8056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8059 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8060 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8061 IEM_MC_ADVANCE_RIP();
8062 IEM_MC_END();
8063 }
8064 }
8065 return VINF_SUCCESS;
8066}
8067
8068
8069/** Opcode 0x0f 0xc0. */
8070FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8071{
8072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8073 IEMOP_HLP_MIN_486();
8074 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8075
8076 /*
8077 * If rm is denoting a register, no more instruction bytes.
8078 */
8079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8080 {
8081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8082
8083 IEM_MC_BEGIN(3, 0);
8084 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8085 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8086 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8087
8088 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8089 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8090 IEM_MC_REF_EFLAGS(pEFlags);
8091 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8092
8093 IEM_MC_ADVANCE_RIP();
8094 IEM_MC_END();
8095 }
8096 else
8097 {
8098 /*
8099 * We're accessing memory.
8100 */
8101 IEM_MC_BEGIN(3, 3);
8102 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8103 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8104 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8105 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8107
8108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8109 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8110 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8111 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8112 IEM_MC_FETCH_EFLAGS(EFlags);
8113 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8114 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8115 else
8116 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8117
8118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8119 IEM_MC_COMMIT_EFLAGS(EFlags);
8120 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8121 IEM_MC_ADVANCE_RIP();
8122 IEM_MC_END();
8123 return VINF_SUCCESS;
8124 }
8125 return VINF_SUCCESS;
8126}
8127
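/*
 * Behavioural sketch (not compiled) of the xadd helpers used above: the
 * destination receives the sum and the register operand receives the old
 * destination value. Arithmetic flags are set as for ADD; their computation
 * is elided here. The function name is made up.
 */
#if 0
static void iemXAddU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pfEFlags)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = u8OldDst + *pu8Reg;   /* destination = old destination + register */
    *pu8Reg = u8OldDst;             /* register = old destination */
    RT_NOREF(pfEFlags);             /* flag updates omitted in this sketch */
}
#endif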
8128
8129/** Opcode 0x0f 0xc1. */
8130FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8131{
8132 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8133 IEMOP_HLP_MIN_486();
8134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8135
8136 /*
8137 * If rm is denoting a register, no more instruction bytes.
8138 */
8139 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8140 {
8141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8142
8143 switch (pVCpu->iem.s.enmEffOpSize)
8144 {
8145 case IEMMODE_16BIT:
8146 IEM_MC_BEGIN(3, 0);
8147 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8148 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8150
8151 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8152 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8153 IEM_MC_REF_EFLAGS(pEFlags);
8154 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8155
8156 IEM_MC_ADVANCE_RIP();
8157 IEM_MC_END();
8158 return VINF_SUCCESS;
8159
8160 case IEMMODE_32BIT:
8161 IEM_MC_BEGIN(3, 0);
8162 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8163 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8165
8166 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8167 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8168 IEM_MC_REF_EFLAGS(pEFlags);
8169 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8170
8171 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8172 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8173 IEM_MC_ADVANCE_RIP();
8174 IEM_MC_END();
8175 return VINF_SUCCESS;
8176
8177 case IEMMODE_64BIT:
8178 IEM_MC_BEGIN(3, 0);
8179 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8180 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8182
8183 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8184 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8185 IEM_MC_REF_EFLAGS(pEFlags);
8186 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8187
8188 IEM_MC_ADVANCE_RIP();
8189 IEM_MC_END();
8190 return VINF_SUCCESS;
8191
8192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8193 }
8194 }
8195 else
8196 {
8197 /*
8198 * We're accessing memory.
8199 */
8200 switch (pVCpu->iem.s.enmEffOpSize)
8201 {
8202 case IEMMODE_16BIT:
8203 IEM_MC_BEGIN(3, 3);
8204 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8205 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8206 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8207 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8209
8210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8211 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8212 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8213 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8214 IEM_MC_FETCH_EFLAGS(EFlags);
8215 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8216 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8217 else
8218 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8219
8220 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8221 IEM_MC_COMMIT_EFLAGS(EFlags);
8222 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8223 IEM_MC_ADVANCE_RIP();
8224 IEM_MC_END();
8225 return VINF_SUCCESS;
8226
8227 case IEMMODE_32BIT:
8228 IEM_MC_BEGIN(3, 3);
8229 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8230 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8231 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8232 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8234
8235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8236 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8237 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8238 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8239 IEM_MC_FETCH_EFLAGS(EFlags);
8240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8241 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8242 else
8243 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8244
8245 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8246 IEM_MC_COMMIT_EFLAGS(EFlags);
8247 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8248 IEM_MC_ADVANCE_RIP();
8249 IEM_MC_END();
8250 return VINF_SUCCESS;
8251
8252 case IEMMODE_64BIT:
8253 IEM_MC_BEGIN(3, 3);
8254 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8255 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8256 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8257 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8259
8260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8261 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8262 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8263 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8264 IEM_MC_FETCH_EFLAGS(EFlags);
8265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8266 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8267 else
8268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8269
8270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8271 IEM_MC_COMMIT_EFLAGS(EFlags);
8272 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8273 IEM_MC_ADVANCE_RIP();
8274 IEM_MC_END();
8275 return VINF_SUCCESS;
8276
8277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8278 }
8279 }
8280}
8281
8282
8283/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8284FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8285/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8286FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8287/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8288FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8289/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8290FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8291
8292
8293/** Opcode 0x0f 0xc3. */
8294FNIEMOP_DEF(iemOp_movnti_My_Gy)
8295{
8296 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8297
8298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8299
8300 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8301 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8302 {
8303 switch (pVCpu->iem.s.enmEffOpSize)
8304 {
8305 case IEMMODE_32BIT:
8306 IEM_MC_BEGIN(0, 2);
8307 IEM_MC_LOCAL(uint32_t, u32Value);
8308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8309
8310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8312 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8313 return IEMOP_RAISE_INVALID_OPCODE();
8314
8315 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8316 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8317 IEM_MC_ADVANCE_RIP();
8318 IEM_MC_END();
8319 break;
8320
8321 case IEMMODE_64BIT:
8322 IEM_MC_BEGIN(0, 2);
8323 IEM_MC_LOCAL(uint64_t, u64Value);
8324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8325
8326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8328 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8329 return IEMOP_RAISE_INVALID_OPCODE();
8330
8331 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8332 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8333 IEM_MC_ADVANCE_RIP();
8334 IEM_MC_END();
8335 break;
8336
8337 case IEMMODE_16BIT:
8338 /** @todo check this form. */
8339 return IEMOP_RAISE_INVALID_OPCODE();
8340 }
8341 }
8342 else
8343 return IEMOP_RAISE_INVALID_OPCODE();
8344 return VINF_SUCCESS;
8345}
8346/* Opcode 0x66 0x0f 0xc3 - invalid */
8347/* Opcode 0xf3 0x0f 0xc3 - invalid */
8348/* Opcode 0xf2 0x0f 0xc3 - invalid */
8349
8350/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8351FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8352/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8353FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8354/* Opcode 0xf3 0x0f 0xc4 - invalid */
8355/* Opcode 0xf2 0x0f 0xc4 - invalid */
8356
8357/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8358FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8359/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8360FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8361/* Opcode 0xf3 0x0f 0xc5 - invalid */
8362/* Opcode 0xf2 0x0f 0xc5 - invalid */
8363
8364/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8365FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8366/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8367FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8368/* Opcode 0xf3 0x0f 0xc6 - invalid */
8369/* Opcode 0xf2 0x0f 0xc6 - invalid */
8370
8371
8372/** Opcode 0x0f 0xc7 !11/1. */
8373FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8374{
8375 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8376
8377 IEM_MC_BEGIN(4, 3);
8378 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8379 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8380 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8381 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8382 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8383 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8385
8386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8387 IEMOP_HLP_DONE_DECODING();
8388 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8389
8390 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8391 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8392 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8393
8394 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8395 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8396 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8397
8398 IEM_MC_FETCH_EFLAGS(EFlags);
8399 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8400 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8401 else
8402 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8403
8404 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8405 IEM_MC_COMMIT_EFLAGS(EFlags);
8406 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8407 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8408 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8409 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8410 IEM_MC_ENDIF();
8411 IEM_MC_ADVANCE_RIP();
8412
8413 IEM_MC_END();
8414 return VINF_SUCCESS;
8415}
8416
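/*
 * Behavioural sketch (not compiled) of cmpxchg8b as emulated above: compare
 * the 64-bit memory operand with EDX:EAX; on match store ECX:EBX and set ZF,
 * otherwise clear ZF and load the memory value into EDX:EAX. Only ZF is
 * modified; locking is handled by the _locked variant of the real helper.
 */
#if 0
static void iemCmpXchg8bSketch(uint64_t *pu64MemDst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64MemDst == pu64EaxEdx->u)
    {
        *pu64MemDst = pu64EbxEcx->u;   /* match: store ECX:EBX */
        *pfEFlags  |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64MemDst;   /* mismatch: load EDX:EAX */
        *pfEFlags    &= ~X86_EFL_ZF;
    }
}
#endif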
8417
8418/** Opcode REX.W 0x0f 0xc7 !11/1. */
8419FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8420{
8421 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8422 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8423 {
8424#if 0
8425 RT_NOREF(bRm);
8426 IEMOP_BITCH_ABOUT_STUB();
8427 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8428#else
8429 IEM_MC_BEGIN(4, 3);
8430 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8431 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8432 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8433 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8434 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8435 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8437
8438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8439 IEMOP_HLP_DONE_DECODING();
8440 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8441 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8442
8443 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8444 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8445 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8446
8447 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8448 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8449 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8450
8451 IEM_MC_FETCH_EFLAGS(EFlags);
8452# ifdef RT_ARCH_AMD64
8453 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8454 {
8455 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8456 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8457 else
8458 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8459 }
8460 else
8461# endif
8462 {
8463 /* Note! The fallback for 32-bit systems and systems without CX16 uses
8464 multiple accesses that are not atomic, which works fine in a uni-CPU
8465 guest configuration (ignoring DMA). If guest SMP is active we have
8466 no choice but to use a rendezvous callback here. Sigh. */
8467 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8468 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8469 else
8470 {
8471 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8472 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8473 }
8474 }
8475
8476 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8477 IEM_MC_COMMIT_EFLAGS(EFlags);
8478 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8479 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8480 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8481 IEM_MC_ENDIF();
8482 IEM_MC_ADVANCE_RIP();
8483
8484 IEM_MC_END();
8485 return VINF_SUCCESS;
8486#endif
8487 }
8488 Log(("cmpxchg16b -> #UD\n"));
8489 return IEMOP_RAISE_INVALID_OPCODE();
8490}
8491
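/*
 * Sketch (not compiled) of the alignment rule enforced above via
 * IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED: cmpxchg16b raises #GP(0) unless
 * its memory operand is 16-byte aligned. Hypothetical helper:
 */
#if 0
static bool iemIsCmpXchg16bOperandAlignedSketch(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0;
}
#endif
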
8492FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8493{
8494 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8495 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8496 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8497}
8498
8499/** Opcode 0x0f 0xc7 11/6. */
8500FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8501
8502/** Opcode 0x0f 0xc7 !11/6. */
8503#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8504FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8505{
8506 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8507 IEMOP_HLP_IN_VMX_OPERATION();
8508 IEMOP_HLP_VMX_INSTR();
8509 IEM_MC_BEGIN(2, 0);
8510 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8511 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8513 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8514 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8515 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8516 IEM_MC_END();
8517 return VINF_SUCCESS;
8518}
8519#else
8520FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8521#endif
8522
8523/** Opcode 0x66 0x0f 0xc7 !11/6. */
8524#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8525FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8526{
8527 IEMOP_MNEMONIC(vmclear, "vmclear");
8528 IEMOP_HLP_IN_VMX_OPERATION();
8529 IEMOP_HLP_VMX_INSTR();
8530 IEM_MC_BEGIN(2, 0);
8531 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8532 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8534 IEMOP_HLP_DONE_DECODING();
8535 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8536 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8537 IEM_MC_END();
8538 return VINF_SUCCESS;
8539}
8540#else
8541FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8542#endif
8543
8544/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8545#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8546FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8547{
8548 IEMOP_MNEMONIC(vmxon, "vmxon");
8549 IEMOP_HLP_VMX_INSTR();
8550 IEM_MC_BEGIN(2, 0);
8551 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8552 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8554 IEMOP_HLP_DONE_DECODING();
8555 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8556 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8557 IEM_MC_END();
8558 return VINF_SUCCESS;
8559}
8560#else
8561FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8562#endif
8563
8564/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8565#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8566FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8567{
8568 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8569 IEMOP_HLP_IN_VMX_OPERATION();
8570 IEMOP_HLP_VMX_INSTR();
8571 IEM_MC_BEGIN(2, 0);
8572 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8573 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8575 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8576 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8577 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
8578 IEM_MC_END();
8579 return VINF_SUCCESS;
8580}
8581#else
8582FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8583#endif
8584
8585/** Opcode 0x0f 0xc7 11/7. */
8586FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8587
8588
8589/**
8590 * Group 9 jump table for register variant.
8591 */
8592IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8593{ /* pfx: none, 066h, 0f3h, 0f2h */
8594 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8595 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8596 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8597 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8598 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8599 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8600 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8601 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8602};
8603AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8604
8605
8606/**
8607 * Group 9 jump table for memory variant.
8608 */
8609IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8610{ /* pfx: none, 066h, 0f3h, 0f2h */
8611 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8612 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8613 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8614 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8615 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8616 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8617 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8618 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8619};
8620AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8621
8622
8623/** Opcode 0x0f 0xc7. */
8624FNIEMOP_DEF(iemOp_Grp9)
8625{
8626 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8628 /* register, register */
8629 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8630 + pVCpu->iem.s.idxPrefix], bRm);
8631 /* memory, register */
8632 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8633 + pVCpu->iem.s.idxPrefix], bRm);
8634}
8635
8636
8637/**
8638 * Common 'bswap register' helper.
8639 */
8640FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8641{
8642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8643 switch (pVCpu->iem.s.enmEffOpSize)
8644 {
8645 case IEMMODE_16BIT:
8646 IEM_MC_BEGIN(1, 0);
8647 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8648 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8649 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8650 IEM_MC_ADVANCE_RIP();
8651 IEM_MC_END();
8652 return VINF_SUCCESS;
8653
8654 case IEMMODE_32BIT:
8655 IEM_MC_BEGIN(1, 0);
8656 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8657 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8658 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8659 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8660 IEM_MC_ADVANCE_RIP();
8661 IEM_MC_END();
8662 return VINF_SUCCESS;
8663
8664 case IEMMODE_64BIT:
8665 IEM_MC_BEGIN(1, 0);
8666 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8667 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8668 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8669 IEM_MC_ADVANCE_RIP();
8670 IEM_MC_END();
8671 return VINF_SUCCESS;
8672
8673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8674 }
8675}
8676
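/*
 * Sketch (not compiled) of the 32-bit byte swap the common helper above
 * dispatches to. The 16-bit form of bswap is documented as undefined, which
 * is why a dedicated bswap_u16 implementation is called for that case. The
 * function name is made up.
 */
#if 0
static uint32_t iemBSwapU32Sketch(uint32_t u32)
{
    return  (u32 >> 24)                          /* byte 3 -> byte 0 */
         | ((u32 >>  8) & UINT32_C(0x0000ff00))  /* byte 2 -> byte 1 */
         | ((u32 <<  8) & UINT32_C(0x00ff0000))  /* byte 1 -> byte 2 */
         |  (u32 << 24);                         /* byte 0 -> byte 3 */
}
#endif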
8677
8678/** Opcode 0x0f 0xc8. */
8679FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8680{
8681 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8682 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
8683 REX.X prefix, but it appears REX.B is the correct one. For a
8684 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8685 IEMOP_HLP_MIN_486();
8686 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8687}
8688
8689
8690/** Opcode 0x0f 0xc9. */
8691FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8692{
8693 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8694 IEMOP_HLP_MIN_486();
8695 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8696}
8697
8698
8699/** Opcode 0x0f 0xca. */
8700FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8701{
8702 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8703 IEMOP_HLP_MIN_486();
8704 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8705}
8706
8707
8708/** Opcode 0x0f 0xcb. */
8709FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8710{
8711 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8712 IEMOP_HLP_MIN_486();
8713 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8714}
8715
8716
8717/** Opcode 0x0f 0xcc. */
8718FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8719{
8720 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8721 IEMOP_HLP_MIN_486();
8722 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8723}
8724
8725
8726/** Opcode 0x0f 0xcd. */
8727FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8728{
8729 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8730 IEMOP_HLP_MIN_486();
8731 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8732}
8733
8734
8735/** Opcode 0x0f 0xce. */
8736FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8737{
8738 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8739 IEMOP_HLP_MIN_486();
8740 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8741}
8742
8743
8744/** Opcode 0x0f 0xcf. */
8745FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8746{
8747 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8748 IEMOP_HLP_MIN_486();
8749 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8750}
8751
8752
8753/* Opcode 0x0f 0xd0 - invalid */
8754/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8755FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8756/* Opcode 0xf3 0x0f 0xd0 - invalid */
8757/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8758FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8759
8760/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8761FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8762/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8763FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8764/* Opcode 0xf3 0x0f 0xd1 - invalid */
8765/* Opcode 0xf2 0x0f 0xd1 - invalid */
8766
8767/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8768FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8769/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8770FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8771/* Opcode 0xf3 0x0f 0xd2 - invalid */
8772/* Opcode 0xf2 0x0f 0xd2 - invalid */
8773
8774/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8775FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8776/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8777FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8778/* Opcode 0xf3 0x0f 0xd3 - invalid */
8779/* Opcode 0xf2 0x0f 0xd3 - invalid */
8780
8781/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8782FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8783/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8784FNIEMOP_STUB(iemOp_paddq_Vx_W);
8785/* Opcode 0xf3 0x0f 0xd4 - invalid */
8786/* Opcode 0xf2 0x0f 0xd4 - invalid */
8787
8788/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8789FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8790/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8791FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8792/* Opcode 0xf3 0x0f 0xd5 - invalid */
8793/* Opcode 0xf2 0x0f 0xd5 - invalid */
8794
8795/* Opcode 0x0f 0xd6 - invalid */
8796
8797/**
8798 * @opcode 0xd6
8799 * @oppfx 0x66
8800 * @opcpuid sse2
8801 * @opgroup og_sse2_pcksclr_datamove
8802 * @opxcpttype none
8803 * @optest op1=-1 op2=2 -> op1=2
8804 * @optest op1=0 op2=-42 -> op1=-42
8805 */
8806FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8807{
8808 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8810 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8811 {
8812 /*
8813 * Register, register.
8814 */
8815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8816 IEM_MC_BEGIN(0, 2);
8817 IEM_MC_LOCAL(uint64_t, uSrc);
8818
8819 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8820 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8821
8822 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8823 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8824
8825 IEM_MC_ADVANCE_RIP();
8826 IEM_MC_END();
8827 }
8828 else
8829 {
8830 /*
8831 * Memory, register.
8832 */
8833 IEM_MC_BEGIN(0, 2);
8834 IEM_MC_LOCAL(uint64_t, uSrc);
8835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8836
8837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8840 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8841
8842 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8843 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8844
8845 IEM_MC_ADVANCE_RIP();
8846 IEM_MC_END();
8847 }
8848 return VINF_SUCCESS;
8849}
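
/* A minimal C model of the register form above (illustrative names, not the
 actual IEM state layout): the low quadword is copied and the destination's
 high quadword is zeroed, matching IEM_MC_STORE_XREG_U64_ZX_U128. */
typedef struct XREGSKETCH { uint64_t au64[2]; } XREGSKETCH;
static void iemSketch_movq_Wq_Vq_reg(XREGSKETCH *pDst, XREGSKETCH const *pSrc)
{
    pDst->au64[0] = pSrc->au64[0]; /* copy the low 64 bits */
    pDst->au64[1] = 0;             /* zero-extend into the high 64 bits */
}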
8850
8851
8852/**
8853 * @opcode 0xd6
8854 * @opcodesub 11 mr/reg
8855 * @oppfx f3
8856 * @opcpuid sse2
8857 * @opgroup og_sse2_simdint_datamove
8858 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8859 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8860 */
8861FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8862{
8863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8865 {
8866 /*
8867 * Register, register.
8868 */
8869 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8871 IEM_MC_BEGIN(0, 1);
8872 IEM_MC_LOCAL(uint64_t, uSrc);
8873
8874 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8875 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8876
8877 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8878 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8879 IEM_MC_FPU_TO_MMX_MODE();
8880
8881 IEM_MC_ADVANCE_RIP();
8882 IEM_MC_END();
8883 return VINF_SUCCESS;
8884 }
8885
8886 /**
8887 * @opdone
8888 * @opmnemonic udf30fd6mem
8889 * @opcode 0xd6
8890 * @opcodesub !11 mr/reg
8891 * @oppfx f3
8892 * @opunused intel-modrm
8893 * @opcpuid sse
8894 * @optest ->
8895 */
8896 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8897}
8898
8899
8900/**
8901 * @opcode 0xd6
8902 * @opcodesub 11 mr/reg
8903 * @oppfx f2
8904 * @opcpuid sse2
8905 * @opgroup og_sse2_simdint_datamove
8906 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8907 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8908 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8909 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8910 * @optest op1=-42 op2=0xfedcba9876543210
8911 * -> op1=0xfedcba9876543210 ftw=0xff
8912 */
8913FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8914{
8915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8917 {
8918 /*
8919 * Register, register.
8920 */
8921 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8923 IEM_MC_BEGIN(0, 1);
8924 IEM_MC_LOCAL(uint64_t, uSrc);
8925
8926 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8927 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8928
8929 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8930 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8931 IEM_MC_FPU_TO_MMX_MODE();
8932
8933 IEM_MC_ADVANCE_RIP();
8934 IEM_MC_END();
8935 return VINF_SUCCESS;
8936 }
8937
8938 /**
8939 * @opdone
8940 * @opmnemonic udf20fd6mem
8941 * @opcode 0xd6
8942 * @opcodesub !11 mr/reg
8943 * @oppfx f2
8944 * @opunused intel-modrm
8945 * @opcpuid sse
8946 * @optest ->
8947 */
8948 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8949}
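
/* Note: the ftw=0xff in the @optest lines above follows from
 IEM_MC_FPU_TO_MMX_MODE: entering MMX mode marks all x87 registers as valid,
 which reads back as 0xff in the abridged FXSAVE tag word, and resets the
 x87 top-of-stack to zero. */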
8950
8951/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8952FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8953{
8954 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8955 /** @todo testcase: Check that the instruction implicitly clears the high
8956 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8957 * and the opcode is modified to operate on the whole width (not
8958 * just 128 bits). */
8959 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8960 /* Docs say register only. */
8961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8962 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8963 {
8964 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8965 IEM_MC_BEGIN(2, 0);
8966 IEM_MC_ARG(uint64_t *, pDst, 0);
8967 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8968 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8969 IEM_MC_PREPARE_FPU_USAGE();
8970 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8971 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8972 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8973 IEM_MC_ADVANCE_RIP();
8974 IEM_MC_END();
8975 return VINF_SUCCESS;
8976 }
8977 return IEMOP_RAISE_INVALID_OPCODE();
8978}
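
/* A sketch of what iemAImpl_pmovmskb_u64 computes (assumed model, not the
 actual assembly worker): gather the most significant bit of each of the
 eight source bytes into the low byte of the destination. The 0x66 variant
 below performs the same gather over 16 bytes, yielding a 16-bit mask. */
static void iemSketch_pmovmskb_u64(uint64_t *pu64Dst, uint64_t const *pu64Src)
{
    uint64_t const uSrc = *pu64Src;
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    *pu64Dst = fMask; /* bits 8 thru 63 end up zero in this model */
}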
8979
8980/** Opcode 0x66 0x0f 0xd7 - */
8981FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8982{
8983 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8984 /** @todo testcase: Check that the instruction implicitly clears the high
8985 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8986 * and the opcode is modified to operate on the whole width (not
8987 * just 128 bits). */
8988 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8989 /* Docs say register only. */
8990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8992 {
8993 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8994 IEM_MC_BEGIN(2, 0);
8995 IEM_MC_ARG(uint64_t *, pDst, 0);
8996 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8997 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8998 IEM_MC_PREPARE_SSE_USAGE();
8999 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9000 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9001 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9002 IEM_MC_ADVANCE_RIP();
9003 IEM_MC_END();
9004 return VINF_SUCCESS;
9005 }
9006 return IEMOP_RAISE_INVALID_OPCODE();
9007}
9008
9009/* Opcode 0xf3 0x0f 0xd7 - invalid */
9010/* Opcode 0xf2 0x0f 0xd7 - invalid */
9011
9012
9013/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9014FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9015/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
9016FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9017/* Opcode 0xf3 0x0f 0xd8 - invalid */
9018/* Opcode 0xf2 0x0f 0xd8 - invalid */
9019
9020/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9021FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9022/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9023FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9024/* Opcode 0xf3 0x0f 0xd9 - invalid */
9025/* Opcode 0xf2 0x0f 0xd9 - invalid */
9026
9027/** Opcode 0x0f 0xda - pminub Pq, Qq */
9028FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9029/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9030FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9031/* Opcode 0xf3 0x0f 0xda - invalid */
9032/* Opcode 0xf2 0x0f 0xda - invalid */
9033
9034/** Opcode 0x0f 0xdb - pand Pq, Qq */
9035FNIEMOP_STUB(iemOp_pand_Pq_Qq);
9036/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
9037FNIEMOP_STUB(iemOp_pand_Vx_W);
9038/* Opcode 0xf3 0x0f 0xdb - invalid */
9039/* Opcode 0xf2 0x0f 0xdb - invalid */
9040
9041/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9042FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9043/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9044FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9045/* Opcode 0xf3 0x0f 0xdc - invalid */
9046/* Opcode 0xf2 0x0f 0xdc - invalid */
9047
9048/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9049FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9050/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9051FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9052/* Opcode 0xf3 0x0f 0xdd - invalid */
9053/* Opcode 0xf2 0x0f 0xdd - invalid */
9054
9055/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9056FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9057/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
9058FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9059/* Opcode 0xf3 0x0f 0xde - invalid */
9060/* Opcode 0xf2 0x0f 0xde - invalid */
9061
9062/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9063FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
9064/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9065FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
9066/* Opcode 0xf3 0x0f 0xdf - invalid */
9067/* Opcode 0xf2 0x0f 0xdf - invalid */
9068
9069/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9070FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9071/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9072FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9073/* Opcode 0xf3 0x0f 0xe0 - invalid */
9074/* Opcode 0xf2 0x0f 0xe0 - invalid */
9075
9076/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9077FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9078/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
9079FNIEMOP_STUB(iemOp_psraw_Vx_W);
9080/* Opcode 0xf3 0x0f 0xe1 - invalid */
9081/* Opcode 0xf2 0x0f 0xe1 - invalid */
9082
9083/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9084FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9085/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9086FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9087/* Opcode 0xf3 0x0f 0xe2 - invalid */
9088/* Opcode 0xf2 0x0f 0xe2 - invalid */
9089
9090/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9091FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9092/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9093FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9094/* Opcode 0xf3 0x0f 0xe3 - invalid */
9095/* Opcode 0xf2 0x0f 0xe3 - invalid */
9096
9097/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9098FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9099/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
9100FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9101/* Opcode 0xf3 0x0f 0xe4 - invalid */
9102/* Opcode 0xf2 0x0f 0xe4 - invalid */
9103
9104/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9105FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9106/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9107FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9108/* Opcode 0xf3 0x0f 0xe5 - invalid */
9109/* Opcode 0xf2 0x0f 0xe5 - invalid */
9110
9111/* Opcode 0x0f 0xe6 - invalid */
9112/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9113FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9114/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9115FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9116/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9117FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9118
9119
9120/**
9121 * @opcode 0xe7
9122 * @opcodesub !11 mr/reg
9123 * @oppfx none
9124 * @opcpuid sse
9125 * @opgroup og_sse1_cachect
9126 * @opxcpttype none
9127 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9128 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9129 */
9130FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9131{
9132 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9134 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9135 {
9136 /* Register, memory. */
9137 IEM_MC_BEGIN(0, 2);
9138 IEM_MC_LOCAL(uint64_t, uSrc);
9139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9140
9141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9143 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9144 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9145
9146 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9147 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9148 IEM_MC_FPU_TO_MMX_MODE();
9149
9150 IEM_MC_ADVANCE_RIP();
9151 IEM_MC_END();
9152 return VINF_SUCCESS;
9153 }
9154 /**
9155 * @opdone
9156 * @opmnemonic ud0fe7reg
9157 * @opcode 0xe7
9158 * @opcodesub 11 mr/reg
9159 * @oppfx none
9160 * @opunused immediate
9161 * @opcpuid sse
9162 * @optest ->
9163 */
9164 return IEMOP_RAISE_INVALID_OPCODE();
9165}
9166
9167/**
9168 * @opcode 0xe7
9169 * @opcodesub !11 mr/reg
9170 * @oppfx 0x66
9171 * @opcpuid sse2
9172 * @opgroup og_sse2_cachect
9173 * @opxcpttype 1
9174 * @optest op1=-1 op2=2 -> op1=2
9175 * @optest op1=0 op2=-42 -> op1=-42
9176 */
9177FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9178{
9179 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9181 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9182 {
9183 /* Register, memory. */
9184 IEM_MC_BEGIN(0, 2);
9185 IEM_MC_LOCAL(RTUINT128U, uSrc);
9186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9187
9188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9192
9193 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9194 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9195
9196 IEM_MC_ADVANCE_RIP();
9197 IEM_MC_END();
9198 return VINF_SUCCESS;
9199 }
9200
9201 /**
9202 * @opdone
9203 * @opmnemonic ud660fe7reg
9204 * @opcode 0xe7
9205 * @opcodesub 11 mr/reg
9206 * @oppfx 0x66
9207 * @opunused immediate
9208 * @opcpuid sse
9209 * @optest ->
9210 */
9211 return IEMOP_RAISE_INVALID_OPCODE();
9212}
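
/* Note: unlike movntq above, movntdq requires a 16-byte aligned memory
 operand; the _ALIGN_SSE store raises #GP(0) on a misaligned address
 (architectural behaviour; the exact macro semantics are inferred from its
 name). */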
9213
9214/* Opcode 0xf3 0x0f 0xe7 - invalid */
9215/* Opcode 0xf2 0x0f 0xe7 - invalid */
9216
9217
9218/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9219FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9220/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9221FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9222/* Opcode 0xf3 0x0f 0xe8 - invalid */
9223/* Opcode 0xf2 0x0f 0xe8 - invalid */
9224
9225/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9226FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9227/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9228FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9229/* Opcode 0xf3 0x0f 0xe9 - invalid */
9230/* Opcode 0xf2 0x0f 0xe9 - invalid */
9231
9232/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9233FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9234/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9235FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9236/* Opcode 0xf3 0x0f 0xea - invalid */
9237/* Opcode 0xf2 0x0f 0xea - invalid */
9238
9239/** Opcode 0x0f 0xeb - por Pq, Qq */
9240FNIEMOP_STUB(iemOp_por_Pq_Qq);
9241/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9242FNIEMOP_STUB(iemOp_por_Vx_W);
9243/* Opcode 0xf3 0x0f 0xeb - invalid */
9244/* Opcode 0xf2 0x0f 0xeb - invalid */
9245
9246/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9247FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9248/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9249FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9250/* Opcode 0xf3 0x0f 0xec - invalid */
9251/* Opcode 0xf2 0x0f 0xec - invalid */
9252
9253/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9254FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9255/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9256FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9257/* Opcode 0xf3 0x0f 0xed - invalid */
9258/* Opcode 0xf2 0x0f 0xed - invalid */
9259
9260/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9261FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9262/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9263FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9264/* Opcode 0xf3 0x0f 0xee - invalid */
9265/* Opcode 0xf2 0x0f 0xee - invalid */
9266
9267
9268/** Opcode 0x0f 0xef - pxor Pq, Qq */
9269FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9270{
9271 IEMOP_MNEMONIC(pxor, "pxor");
9272 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9273}
9274
9275/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9276FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9277{
9278 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9279 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9280}
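
/* g_iemAImpl_pxor bundles the MMX/SSE2 assembly workers; per 64-bit lane the
 operation reduces to a plain XOR. An illustrative C model (assumption, not
 the actual worker): */
static void iemSketch_pxor_u64(uint64_t *pu64Dst, uint64_t const *pu64Src)
{
    *pu64Dst ^= *pu64Src; /* bitwise exclusive OR over the full lane */
}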
9281
9282/* Opcode 0xf3 0x0f 0xef - invalid */
9283/* Opcode 0xf2 0x0f 0xef - invalid */
9284
9285/* Opcode 0x0f 0xf0 - invalid */
9286/* Opcode 0x66 0x0f 0xf0 - invalid */
9287/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9288FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9289
9290/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9291FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9292/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9293FNIEMOP_STUB(iemOp_psllw_Vx_W);
9294/* Opcode 0xf2 0x0f 0xf1 - invalid */
9295
9296/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9297FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9298/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9299FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9300/* Opcode 0xf2 0x0f 0xf2 - invalid */
9301
9302/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9303FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9304/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9305FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9306/* Opcode 0xf2 0x0f 0xf3 - invalid */
9307
9308/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9309FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9310/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9311FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9312/* Opcode 0xf2 0x0f 0xf4 - invalid */
9313
9314/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9315FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9316/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9317FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9318/* Opcode 0xf2 0x0f 0xf5 - invalid */
9319
9320/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9321FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9322/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9323FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9324/* Opcode 0xf2 0x0f 0xf6 - invalid */
9325
9326/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9327FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9328/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9329FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9330/* Opcode 0xf2 0x0f 0xf7 - invalid */
9331
9332/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9333FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9334/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9335FNIEMOP_STUB(iemOp_psubb_Vx_W);
9336/* Opcode 0xf2 0x0f 0xf8 - invalid */
9337
9338/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9339FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9340/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9341FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9342/* Opcode 0xf2 0x0f 0xf9 - invalid */
9343
9344/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9345FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9346/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9347FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9348/* Opcode 0xf2 0x0f 0xfa - invalid */
9349
9350/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9351FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9352/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9353FNIEMOP_STUB(iemOp_psubq_Vx_W);
9354/* Opcode 0xf2 0x0f 0xfb - invalid */
9355
9356/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9357FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9358/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9359FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9360/* Opcode 0xf2 0x0f 0xfc - invalid */
9361
9362/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9363FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9364/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9365FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9366/* Opcode 0xf2 0x0f 0xfd - invalid */
9367
9368/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9369FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9370/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9371FNIEMOP_STUB(iemOp_paddd_Vx_W);
9372/* Opcode 0xf2 0x0f 0xfe - invalid */
9373
9374
9375/** Opcode 0x0f 0xff - UD0 */
9376FNIEMOP_DEF(iemOp_ud0)
9377{
9378 IEMOP_MNEMONIC(ud0, "ud0");
9379 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9380 {
9381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9382#ifndef TST_IEM_CHECK_MC
9383 RTGCPTR GCPtrEff;
9384 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9385 if (rcStrict != VINF_SUCCESS)
9386 return rcStrict;
9387#endif
9388 IEMOP_HLP_DONE_DECODING();
9389 }
9390 return IEMOP_RAISE_INVALID_OPCODE();
9391}
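
/* Note: on Intel CPUs ud0 consumes a ModR/M byte (plus any SIB/displacement
 bytes) before raising #UD, which is why the effective address is decoded
 above. For example, 0F FF 04 25 00 00 00 00 (an assumed example encoding)
 eats the full SIB+disp32 before faulting. */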
9392
9393
9394
9395/**
9396 * Two byte opcode map, first byte 0x0f.
9397 *
9398 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9399 * check if it needs updating as well when making changes.
9400 */
9401IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9402{
9403 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9404 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9405 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9406 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9407 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9408 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9409 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9410 /* 0x06 */ IEMOP_X4(iemOp_clts),
9411 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9412 /* 0x08 */ IEMOP_X4(iemOp_invd),
9413 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9414 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9415 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9416 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9417 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9418 /* 0x0e */ IEMOP_X4(iemOp_femms),
9419 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9420
9421 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9422 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9423 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9424 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9425 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9426 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9427 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9428 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9429 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9430 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9431 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9432 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9433 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9434 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9435 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9436 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9437
9438 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9439 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9440 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9441 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9442 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9443 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9444 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9445 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9446 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9447 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9448 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9449 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9450 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9451 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9452 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9453 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9454
9455 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9456 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9457 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9458 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9459 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9460 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9461 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9462 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9463 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9464 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9465 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9466 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9467 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9468 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9469 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9470 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9471
9472 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9473 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9474 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9475 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9476 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9477 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9478 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9479 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9480 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9481 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9482 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9483 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9484 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9485 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9486 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9487 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9488
9489 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9490 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9491 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9492 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9493 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9494 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9495 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9496 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9497 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9498 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9499 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9500 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9501 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9502 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9503 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9504 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9505
9506 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9507 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9508 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9509 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9510 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9511 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9512 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9513 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9514 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9515 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9516 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9517 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9518 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9519 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9520 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9521 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9522
9523 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9524 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9525 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9526 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9527 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9528 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9529 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9530 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9531
9532 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9533 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9534 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9535 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9536 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9537 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9538 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9539 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9540
9541 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9542 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9543 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9544 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9545 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9546 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9547 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9548 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9549 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9550 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9551 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9552 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9553 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9554 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9555 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9556 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9557
9558 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9559 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9560 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9561 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9562 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9563 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9564 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9565 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9566 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9567 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9568 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9569 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9570 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9571 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9572 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9573 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9574
9575 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9576 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9577 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9578 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9579 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9580 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9581 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9582 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9583 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9584 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9585 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9586 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9587 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9588 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9589 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9590 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9591
9592 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9593 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9594 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9595 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9596 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9597 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9598 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9599 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9600 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9601 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9602 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9603 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9604 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9605 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9606 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9607 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9608
9609 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9610 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9611 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9612 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9613 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9614 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9615 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9616 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9617 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9618 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9619 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9620 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9621 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9622 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9623 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9624 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9625
9626 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9627 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9628 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9629 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9630 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9631 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9632 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9633 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9634 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9635 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9636 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9637 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9638 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9639 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9640 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9641 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9642
9643 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9644 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9645 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9646 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9647 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9648 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9649 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9650 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9651 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9652 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9653 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9654 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9655 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9656 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9657 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9658 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9659
9660 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9661 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9662 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9663 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9664 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9665 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9666 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9667 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9668 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9669 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9670 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9671 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9672 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9673 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9674 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9675 /* 0xff */ IEMOP_X4(iemOp_ud0),
9676};
9677AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
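
/* The map is laid out as 256 opcode rows x 4 prefix columns (none, 0x66,
 0xf3, 0xf2), hence the 1024-entry assertion above. Dispatch presumably
 indexes it as row*4+column, along these lines (a sketch assumed from the
 table layout, not quoted from the decoder):
 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix]);
 */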
9678
9679/** @} */
9680