VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 73983

Last change on this file since 73983 was 73983, checked in by vboxsync, 7 years ago

VMM/IEM, HM: Nested VMX: bugref:9180 Implement VMREAD; use the decoded IEM APIs for
VMXON, VMREAD, VMWRITE in the VMX R0 code.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 341.3 KB
 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 73983 2018-08-31 08:17:31Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
38 }
39
40 /* Ignore operand size here, memory refs are always 16-bit. */
41 IEM_MC_BEGIN(2, 0);
42 IEM_MC_ARG(uint16_t, iEffSeg, 0);
43 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
44 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
45 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
46 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
47 IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
48 IEM_MC_END();
49 return VINF_SUCCESS;
50}
51
52
53/** Opcode 0x0f 0x00 /1. */
54FNIEMOPRM_DEF(iemOp_Grp6_str)
55{
56 IEMOP_MNEMONIC(str, "str Rv/Mw");
57 IEMOP_HLP_MIN_286();
58 IEMOP_HLP_NO_REAL_OR_V86_MODE();
59
60
61 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
62 {
63 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
64 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
65 }
66
67 /* Ignore operand size here, memory refs are always 16-bit. */
68 IEM_MC_BEGIN(2, 0);
69 IEM_MC_ARG(uint16_t, iEffSeg, 0);
70 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
71 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
72 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
73 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
74 IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
75 IEM_MC_END();
76 return VINF_SUCCESS;
77}
78
79
80/** Opcode 0x0f 0x00 /2. */
81FNIEMOPRM_DEF(iemOp_Grp6_lldt)
82{
83 IEMOP_MNEMONIC(lldt, "lldt Ew");
84 IEMOP_HLP_MIN_286();
85 IEMOP_HLP_NO_REAL_OR_V86_MODE();
86
87 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
88 {
89 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
90 IEM_MC_BEGIN(1, 0);
91 IEM_MC_ARG(uint16_t, u16Sel, 0);
92 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
93 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
94 IEM_MC_END();
95 }
96 else
97 {
98 IEM_MC_BEGIN(1, 1);
99 IEM_MC_ARG(uint16_t, u16Sel, 0);
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
102 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
103 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
104 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
105 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
106 IEM_MC_END();
107 }
108 return VINF_SUCCESS;
109}
110
111
112/** Opcode 0x0f 0x00 /3. */
113FNIEMOPRM_DEF(iemOp_Grp6_ltr)
114{
115 IEMOP_MNEMONIC(ltr, "ltr Ew");
116 IEMOP_HLP_MIN_286();
117 IEMOP_HLP_NO_REAL_OR_V86_MODE();
118
119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
120 {
121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
122 IEM_MC_BEGIN(1, 0);
123 IEM_MC_ARG(uint16_t, u16Sel, 0);
124 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
125 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
126 IEM_MC_END();
127 }
128 else
129 {
130 IEM_MC_BEGIN(1, 1);
131 IEM_MC_ARG(uint16_t, u16Sel, 0);
132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
135 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
136 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
137 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
138 IEM_MC_END();
139 }
140 return VINF_SUCCESS;
141}
142
143
144/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
145FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
146{
147 IEMOP_HLP_MIN_286();
148 IEMOP_HLP_NO_REAL_OR_V86_MODE();
149
150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
151 {
152 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
153 IEM_MC_BEGIN(2, 0);
154 IEM_MC_ARG(uint16_t, u16Sel, 0);
155 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
156 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
157 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
158 IEM_MC_END();
159 }
160 else
161 {
162 IEM_MC_BEGIN(2, 1);
163 IEM_MC_ARG(uint16_t, u16Sel, 0);
164 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
167 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
168 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
169 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
170 IEM_MC_END();
171 }
172 return VINF_SUCCESS;
173}
174
175
176/** Opcode 0x0f 0x00 /4. */
177FNIEMOPRM_DEF(iemOp_Grp6_verr)
178{
179 IEMOP_MNEMONIC(verr, "verr Ew");
180 IEMOP_HLP_MIN_286();
181 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
182}
183
184
185/** Opcode 0x0f 0x00 /5. */
186FNIEMOPRM_DEF(iemOp_Grp6_verw)
187{
188 IEMOP_MNEMONIC(verw, "verw Ew");
189 IEMOP_HLP_MIN_286();
190 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
191}
192
193
194/**
195 * Group 6 jump table.
196 */
197IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
198{
199 iemOp_Grp6_sldt,
200 iemOp_Grp6_str,
201 iemOp_Grp6_lldt,
202 iemOp_Grp6_ltr,
203 iemOp_Grp6_verr,
204 iemOp_Grp6_verw,
205 iemOp_InvalidWithRM,
206 iemOp_InvalidWithRM
207};
208
209/** Opcode 0x0f 0x00. */
210FNIEMOP_DEF(iemOp_Grp6)
211{
212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
213 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
214}
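
/*
 * Editor's note - an illustrative sketch (guarded out; the helper name is
 * hypothetical) of how a ModR/M byte splits into the mod/reg/rm fields that
 * the group dispatchers in this file key off:
 */
#if 0 /* example only */
static void iemExampleSplitModRm(uint8_t bRm)
{
    uint8_t const iMod = (bRm >> X86_MODRM_MOD_SHIFT) & 3;                   /* bits 7:6, 3 = register operand */
    uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* bits 5:3, selects the group member */
    uint8_t const iRm  = bRm & X86_MODRM_RM_MASK;                            /* bits 2:0, register or addressing form */
    /* e.g. bRm=0xd8: iMod=3, iReg=3 (ltr in group 6), iRm=0. */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif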
215
216
217/** Opcode 0x0f 0x01 /0. */
218FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
219{
220 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
221 IEMOP_HLP_MIN_286();
222 IEMOP_HLP_64BIT_OP_SIZE();
223 IEM_MC_BEGIN(2, 1);
224 IEM_MC_ARG(uint8_t, iEffSeg, 0);
225 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
228 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
229 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
230 IEM_MC_END();
231 return VINF_SUCCESS;
232}
233
234
235/** Opcode 0x0f 0x01 0xc1 (vmcall). */
236FNIEMOP_DEF(iemOp_Grp7_vmcall)
237{
238 IEMOP_MNEMONIC(vmcall, "vmcall");
239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
240
241 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
242 want all hypercalls regardless of the instruction used; if a hypercall
243 isn't handled by GIM or HMSvm, an #UD is raised.
244 (NEM/win makes ASSUMPTIONS about this behavior.) */
245 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
246}
247
248
249/** Opcode 0x0f 0x01 0xc2 (vmlaunch). */
250FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
251{
252 IEMOP_BITCH_ABOUT_STUB();
253 return IEMOP_RAISE_INVALID_OPCODE();
254}
255
256
257/** Opcode 0x0f 0x01 0xc3 (vmresume). */
258FNIEMOP_DEF(iemOp_Grp7_vmresume)
259{
260 IEMOP_BITCH_ABOUT_STUB();
261 return IEMOP_RAISE_INVALID_OPCODE();
262}
263
264
265/** Opcode 0x0f 0x01 0xc4 (vmxoff). */
266#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
267FNIEMOP_DEF(iemOp_Grp7_vmxoff)
268{
269 IEMOP_MNEMONIC(vmxoff, "vmxoff");
270 IEMOP_HLP_DONE_DECODING();
271 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
272}
273#else
274FNIEMOP_DEF(iemOp_Grp7_vmxoff)
275{
276 IEMOP_BITCH_ABOUT_STUB();
277 return IEMOP_RAISE_INVALID_OPCODE();
278}
279#endif
280
281
282/** Opcode 0x0f 0x01 /1. */
283FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
284{
285 IEMOP_MNEMONIC(sidt, "sidt Ms");
286 IEMOP_HLP_MIN_286();
287 IEMOP_HLP_64BIT_OP_SIZE();
288 IEM_MC_BEGIN(2, 1);
289 IEM_MC_ARG(uint8_t, iEffSeg, 0);
290 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
293 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
294 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
295 IEM_MC_END();
296 return VINF_SUCCESS;
297}
298
299
300/** Opcode 0x0f 0x01 /1. */
301FNIEMOP_DEF(iemOp_Grp7_monitor)
302{
303 IEMOP_MNEMONIC(monitor, "monitor");
304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
305 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
306}
307
308
309/** Opcode 0x0f 0x01 /1. */
310FNIEMOP_DEF(iemOp_Grp7_mwait)
311{
312 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
314 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
315}
316
317
318/** Opcode 0x0f 0x01 /2. */
319FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
320{
321 IEMOP_MNEMONIC(lgdt, "lgdt");
322 IEMOP_HLP_64BIT_OP_SIZE();
323 IEM_MC_BEGIN(3, 1);
324 IEM_MC_ARG(uint8_t, iEffSeg, 0);
325 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
326 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
329 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
330 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
331 IEM_MC_END();
332 return VINF_SUCCESS;
333}
334
335
336/** Opcode 0x0f 0x01 0xd0. */
337FNIEMOP_DEF(iemOp_Grp7_xgetbv)
338{
339 IEMOP_MNEMONIC(xgetbv, "xgetbv");
340 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
341 {
342 /** @todo r=ramshankar: We should use
343 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
344 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
345 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
346 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
347 }
348 return IEMOP_RAISE_INVALID_OPCODE();
349}
350
351
352/** Opcode 0x0f 0x01 0xd1. */
353FNIEMOP_DEF(iemOp_Grp7_xsetbv)
354{
355 IEMOP_MNEMONIC(xsetbv, "xsetbv");
356 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
357 {
358 /** @todo r=ramshankar: We should use
359 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
360 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
361 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
362 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
363 }
364 return IEMOP_RAISE_INVALID_OPCODE();
365}
366
367
368/** Opcode 0x0f 0x01 /3. */
369FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
370{
371 IEMOP_MNEMONIC(lidt, "lidt");
372 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
373 ? IEMMODE_64BIT
374 : pVCpu->iem.s.enmEffOpSize;
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint8_t, iEffSeg, 0);
377 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
378 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
381 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
382 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
383 IEM_MC_END();
384 return VINF_SUCCESS;
385}
386
387
388/** Opcode 0x0f 0x01 0xd8. */
389#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
390FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
391{
392 IEMOP_MNEMONIC(vmrun, "vmrun");
393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
394 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
395}
396#else
397FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
398#endif
399
400/** Opcode 0x0f 0x01 0xd9. */
401FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
402{
403 IEMOP_MNEMONIC(vmmcall, "vmmcall");
404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
405
406 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
407 want all hypercalls regardless of the instruction used; if a hypercall
408 isn't handled by GIM or HMSvm, an #UD is raised.
409 (NEM/win makes ASSUMPTIONS about this behavior.) */
410 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
411}
412
413/** Opcode 0x0f 0x01 0xda. */
414#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
415FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
416{
417 IEMOP_MNEMONIC(vmload, "vmload");
418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
419 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
420}
421#else
422FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
423#endif
424
425
426/** Opcode 0x0f 0x01 0xdb. */
427#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
428FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
429{
430 IEMOP_MNEMONIC(vmsave, "vmsave");
431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
432 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
433}
434#else
435FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
436#endif
437
438
439/** Opcode 0x0f 0x01 0xdc. */
440#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
441FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
442{
443 IEMOP_MNEMONIC(stgi, "stgi");
444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
445 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
446}
447#else
448FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
449#endif
450
451
452/** Opcode 0x0f 0x01 0xdd. */
453#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
454FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
455{
456 IEMOP_MNEMONIC(clgi, "clgi");
457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
458 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
459}
460#else
461FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
462#endif
463
464
465/** Opcode 0x0f 0x01 0xdf. */
466#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
467FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
468{
469 IEMOP_MNEMONIC(invlpga, "invlpga");
470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
471 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
472}
473#else
474FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
475#endif
476
477
478/** Opcode 0x0f 0x01 0xde. */
479#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
480FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
481{
482 IEMOP_MNEMONIC(skinit, "skinit");
483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
484 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
485}
486#else
487FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
488#endif
489
490
491/** Opcode 0x0f 0x01 /4. */
492FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
493{
494 IEMOP_MNEMONIC(smsw, "smsw");
495 IEMOP_HLP_MIN_286();
496 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
497 {
498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
499 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, pVCpu->iem.s.enmEffOpSize);
500 }
501
502 /* Ignore operand size here, memory refs are always 16-bit. */
503 IEM_MC_BEGIN(2, 0);
504 IEM_MC_ARG(uint16_t, iEffSeg, 0);
505 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
508 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
509 IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
510 IEM_MC_END();
511 return VINF_SUCCESS;
512}
513
514
515/** Opcode 0x0f 0x01 /6. */
516FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
517{
518 /* The operand size is effectively ignored, all is 16-bit and only the
519 low four bits (PE, MP, EM, TS) are used; PE can be set but not cleared. */
520 IEMOP_MNEMONIC(lmsw, "lmsw");
521 IEMOP_HLP_MIN_286();
522 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
523 {
524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
525 IEM_MC_BEGIN(1, 0);
526 IEM_MC_ARG(uint16_t, u16Tmp, 0);
527 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
528 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
529 IEM_MC_END();
530 }
531 else
532 {
533 IEM_MC_BEGIN(1, 1);
534 IEM_MC_ARG(uint16_t, u16Tmp, 0);
535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
538 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
539 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
540 IEM_MC_END();
541 }
542 return VINF_SUCCESS;
543}
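
/*
 * Editor's note - a sketch of the architectural effect of LMSW per the SDM
 * (variable names are hypothetical; the real work is done in iemCImpl_lmsw):
 */
#if 0 /* example only */
static uint64_t iemExampleLmswEffect(uint64_t uOldCr0, uint16_t uMsw)
{
    uint64_t uNewCr0 = (uOldCr0 & ~UINT64_C(0xe)) | (uMsw & 0xe); /* MP, EM and TS are copied from the source. */
    uNewCr0 |= uMsw & 1;                                          /* PE can be set but never cleared. */
    return uNewCr0;
}
#endif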
544
545
546/** Opcode 0x0f 0x01 /7. */
547FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
548{
549 IEMOP_MNEMONIC(invlpg, "invlpg");
550 IEMOP_HLP_MIN_486();
551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
552 IEM_MC_BEGIN(1, 1);
553 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
555 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
556 IEM_MC_END();
557 return VINF_SUCCESS;
558}
559
560
561/** Opcode 0x0f 0x01 /7. */
562FNIEMOP_DEF(iemOp_Grp7_swapgs)
563{
564 IEMOP_MNEMONIC(swapgs, "swapgs");
565 IEMOP_HLP_ONLY_64BIT();
566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
567 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
568}
569
570
571/** Opcode 0x0f 0x01 /7. */
572FNIEMOP_DEF(iemOp_Grp7_rdtscp)
573{
574 IEMOP_MNEMONIC(rdtscp, "rdtscp");
575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
576 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
577}
578
579
580/**
581 * Group 7 jump table, memory variant.
582 */
583IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
584{
585 iemOp_Grp7_sgdt,
586 iemOp_Grp7_sidt,
587 iemOp_Grp7_lgdt,
588 iemOp_Grp7_lidt,
589 iemOp_Grp7_smsw,
590 iemOp_InvalidWithRM,
591 iemOp_Grp7_lmsw,
592 iemOp_Grp7_invlpg
593};
594
595
596/** Opcode 0x0f 0x01. */
597FNIEMOP_DEF(iemOp_Grp7)
598{
599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
600 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
601 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
602
603 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
604 {
605 case 0:
606 switch (bRm & X86_MODRM_RM_MASK)
607 {
608 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
609 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
610 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
611 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
612 }
613 return IEMOP_RAISE_INVALID_OPCODE();
614
615 case 1:
616 switch (bRm & X86_MODRM_RM_MASK)
617 {
618 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
619 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
620 }
621 return IEMOP_RAISE_INVALID_OPCODE();
622
623 case 2:
624 switch (bRm & X86_MODRM_RM_MASK)
625 {
626 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
627 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
628 }
629 return IEMOP_RAISE_INVALID_OPCODE();
630
631 case 3:
632 switch (bRm & X86_MODRM_RM_MASK)
633 {
634 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
635 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
636 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
637 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
638 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
639 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
640 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
641 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
643 }
644
645 case 4:
646 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
647
648 case 5:
649 return IEMOP_RAISE_INVALID_OPCODE();
650
651 case 6:
652 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
653
654 case 7:
655 switch (bRm & X86_MODRM_RM_MASK)
656 {
657 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
658 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
659 }
660 return IEMOP_RAISE_INVALID_OPCODE();
661
662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
663 }
664}
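
/*
 * Editor's note - worked examples of the mod=3 encodings dispatched above
 * (ModR/M byte = mod[7:6] reg[5:3] rm[2:0]):
 *      0f 01 c1: mod=3, reg=0, rm=1 -> vmcall
 *      0f 01 d0: mod=3, reg=2, rm=0 -> xgetbv
 *      0f 01 d8: mod=3, reg=3, rm=0 -> vmrun (AMD)
 *      0f 01 f8: mod=3, reg=7, rm=0 -> swapgs
 */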
665
666/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
667FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
668{
669 IEMOP_HLP_NO_REAL_OR_V86_MODE();
670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
671
672 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
673 {
674 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
675 switch (pVCpu->iem.s.enmEffOpSize)
676 {
677 case IEMMODE_16BIT:
678 {
679 IEM_MC_BEGIN(3, 0);
680 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
681 IEM_MC_ARG(uint16_t, u16Sel, 1);
682 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
683
684 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
685 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
686 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
687
688 IEM_MC_END();
689 return VINF_SUCCESS;
690 }
691
692 case IEMMODE_32BIT:
693 case IEMMODE_64BIT:
694 {
695 IEM_MC_BEGIN(3, 0);
696 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
697 IEM_MC_ARG(uint16_t, u16Sel, 1);
698 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
699
700 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
701 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
702 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
703
704 IEM_MC_END();
705 return VINF_SUCCESS;
706 }
707
708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
709 }
710 }
711 else
712 {
713 switch (pVCpu->iem.s.enmEffOpSize)
714 {
715 case IEMMODE_16BIT:
716 {
717 IEM_MC_BEGIN(3, 1);
718 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
719 IEM_MC_ARG(uint16_t, u16Sel, 1);
720 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
722
723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
724 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
725
726 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
727 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
728 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
729
730 IEM_MC_END();
731 return VINF_SUCCESS;
732 }
733
734 case IEMMODE_32BIT:
735 case IEMMODE_64BIT:
736 {
737 IEM_MC_BEGIN(3, 1);
738 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
739 IEM_MC_ARG(uint16_t, u16Sel, 1);
740 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
742
743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
744 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
745/** @todo testcase: make sure it's a 16-bit read. */
746
747 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
748 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
749 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
750
751 IEM_MC_END();
752 return VINF_SUCCESS;
753 }
754
755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
756 }
757 }
758}
759
760
761
762/** Opcode 0x0f 0x02. */
763FNIEMOP_DEF(iemOp_lar_Gv_Ew)
764{
765 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
766 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
767}
768
769
770/** Opcode 0x0f 0x03. */
771FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
772{
773 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
774 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
775}
776
777
778/** Opcode 0x0f 0x05. */
779FNIEMOP_DEF(iemOp_syscall)
780{
781 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
783 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
784}
785
786
787/** Opcode 0x0f 0x06. */
788FNIEMOP_DEF(iemOp_clts)
789{
790 IEMOP_MNEMONIC(clts, "clts");
791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
792 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
793}
794
795
796/** Opcode 0x0f 0x07. */
797FNIEMOP_DEF(iemOp_sysret)
798{
799 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
801 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
802}
803
804
805/** Opcode 0x0f 0x08. */
806FNIEMOP_DEF(iemOp_invd)
807{
808 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
809 IEMOP_HLP_MIN_486();
810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
812}
813
814
815/** Opcode 0x0f 0x09. */
816FNIEMOP_DEF(iemOp_wbinvd)
817{
818 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
819 IEMOP_HLP_MIN_486();
820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
821 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
822}
823
824
825/** Opcode 0x0f 0x0b. */
826FNIEMOP_DEF(iemOp_ud2)
827{
828 IEMOP_MNEMONIC(ud2, "ud2");
829 return IEMOP_RAISE_INVALID_OPCODE();
830}
831
832/** Opcode 0x0f 0x0d. */
833FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
834{
835 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
836 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
837 {
838 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
839 return IEMOP_RAISE_INVALID_OPCODE();
840 }
841
842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
843 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
844 {
845 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
846 return IEMOP_RAISE_INVALID_OPCODE();
847 }
848
849 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
850 {
851 case 2: /* Aliased to /0 for the time being. */
852 case 4: /* Aliased to /0 for the time being. */
853 case 5: /* Aliased to /0 for the time being. */
854 case 6: /* Aliased to /0 for the time being. */
855 case 7: /* Aliased to /0 for the time being. */
856 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
857 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
858 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
860 }
861
862 IEM_MC_BEGIN(0, 1);
863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
866 /* Currently a NOP. */
867 NOREF(GCPtrEffSrc);
868 IEM_MC_ADVANCE_RIP();
869 IEM_MC_END();
870 return VINF_SUCCESS;
871}
872
873
874/** Opcode 0x0f 0x0e. */
875FNIEMOP_DEF(iemOp_femms)
876{
877 IEMOP_MNEMONIC(femms, "femms");
878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
879
880 IEM_MC_BEGIN(0, 0);
881 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
882 IEM_MC_MAYBE_RAISE_FPU_XCPT();
883 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
884 IEM_MC_FPU_FROM_MMX_MODE();
885 IEM_MC_ADVANCE_RIP();
886 IEM_MC_END();
887 return VINF_SUCCESS;
888}
889
890
891/** Opcode 0x0f 0x0f. */
892FNIEMOP_DEF(iemOp_3Dnow)
893{
894 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
895 {
896 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
897 return IEMOP_RAISE_INVALID_OPCODE();
898 }
899
900#ifdef IEM_WITH_3DNOW
901 /* This is pretty sparse, use switch instead of table. */
902 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
903 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
904#else
905 IEMOP_BITCH_ABOUT_STUB();
906 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
907#endif
908}
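
/*
 * Editor's note - 3DNow! instructions carry their real opcode in a trailing
 * immediate-style byte (0f 0f /r ib); e.g. 0f 0f c1 9e is pfadd mm0, mm1
 * (ModR/M 0xc1 selects mm0, mm1 and the 0x9e suffix selects pfadd).
 */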
909
910
911/**
912 * @opcode 0x10
913 * @oppfx none
914 * @opcpuid sse
915 * @opgroup og_sse_simdfp_datamove
916 * @opxcpttype 4UA
917 * @optest op1=1 op2=2 -> op1=2
918 * @optest op1=0 op2=-22 -> op1=-22
919 */
920FNIEMOP_DEF(iemOp_movups_Vps_Wps)
921{
922 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
925 {
926 /*
927 * Register, register.
928 */
929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
930 IEM_MC_BEGIN(0, 0);
931 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
932 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
933 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
934 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
935 IEM_MC_ADVANCE_RIP();
936 IEM_MC_END();
937 }
938 else
939 {
940 /*
941 * Register, memory.
942 */
943 IEM_MC_BEGIN(0, 2);
944 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
946
947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
949 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
950 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
951
952 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
953 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
954
955 IEM_MC_ADVANCE_RIP();
956 IEM_MC_END();
957 }
958 return VINF_SUCCESS;
959
960}
961
962
963/**
964 * @opcode 0x10
965 * @oppfx 0x66
966 * @opcpuid sse2
967 * @opgroup og_sse2_pcksclr_datamove
968 * @opxcpttype 4UA
969 * @optest op1=1 op2=2 -> op1=2
970 * @optest op1=0 op2=-42 -> op1=-42
971 */
972FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
973{
974 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
977 {
978 /*
979 * Register, register.
980 */
981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
982 IEM_MC_BEGIN(0, 0);
983 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
984 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
985 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
986 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
987 IEM_MC_ADVANCE_RIP();
988 IEM_MC_END();
989 }
990 else
991 {
992 /*
993 * Register, memory.
994 */
995 IEM_MC_BEGIN(0, 2);
996 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
998
999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1001 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1002 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1003
1004 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1005 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1006
1007 IEM_MC_ADVANCE_RIP();
1008 IEM_MC_END();
1009 }
1010 return VINF_SUCCESS;
1011}
1012
1013
1014/**
1015 * @opcode 0x10
1016 * @oppfx 0xf3
1017 * @opcpuid sse
1018 * @opgroup og_sse_simdfp_datamove
1019 * @opxcpttype 5
1020 * @optest op1=1 op2=2 -> op1=2
1021 * @optest op1=0 op2=-22 -> op1=-22
1022 */
1023FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1024{
1025 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1028 {
1029 /*
1030 * Register, register.
1031 */
1032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1033 IEM_MC_BEGIN(0, 1);
1034 IEM_MC_LOCAL(uint32_t, uSrc);
1035
1036 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1037 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1038 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1039 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1040
1041 IEM_MC_ADVANCE_RIP();
1042 IEM_MC_END();
1043 }
1044 else
1045 {
1046 /*
1047 * Register, memory.
1048 */
1049 IEM_MC_BEGIN(0, 2);
1050 IEM_MC_LOCAL(uint32_t, uSrc);
1051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1052
1053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1055 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1056 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1057
1058 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1059 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1060
1061 IEM_MC_ADVANCE_RIP();
1062 IEM_MC_END();
1063 }
1064 return VINF_SUCCESS;
1065}
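
/*
 * Editor's note - the asymmetry between the two blocks above matches the SDM:
 * the register form of movss writes only the low dword of the destination and
 * preserves bits 127:32, while the memory form zero-extends the full register:
 *      movss xmm1, xmm2        -> xmm1[31:0] = xmm2[31:0], xmm1[127:32] unchanged
 *      movss xmm1, dword [mem] -> xmm1[31:0] = mem32, xmm1[127:32] = 0
 */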
1066
1067
1068/**
1069 * @opcode 0x10
1070 * @oppfx 0xf2
1071 * @opcpuid sse2
1072 * @opgroup og_sse2_pcksclr_datamove
1073 * @opxcpttype 5
1074 * @optest op1=1 op2=2 -> op1=2
1075 * @optest op1=0 op2=-42 -> op1=-42
1076 */
1077FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1078{
1079 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1081 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1082 {
1083 /*
1084 * Register, register.
1085 */
1086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1087 IEM_MC_BEGIN(0, 1);
1088 IEM_MC_LOCAL(uint64_t, uSrc);
1089
1090 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1091 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1092 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1093 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1094
1095 IEM_MC_ADVANCE_RIP();
1096 IEM_MC_END();
1097 }
1098 else
1099 {
1100 /*
1101 * Register, memory.
1102 */
1103 IEM_MC_BEGIN(0, 2);
1104 IEM_MC_LOCAL(uint64_t, uSrc);
1105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1106
1107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1109 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1110 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1111
1112 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1113 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1114
1115 IEM_MC_ADVANCE_RIP();
1116 IEM_MC_END();
1117 }
1118 return VINF_SUCCESS;
1119}
1120
1121
1122/**
1123 * @opcode 0x11
1124 * @oppfx none
1125 * @opcpuid sse
1126 * @opgroup og_sse_simdfp_datamove
1127 * @opxcpttype 4UA
1128 * @optest op1=1 op2=2 -> op1=2
1129 * @optest op1=0 op2=-42 -> op1=-42
1130 */
1131FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1132{
1133 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1135 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1136 {
1137 /*
1138 * Register, register.
1139 */
1140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 IEM_MC_BEGIN(0, 0);
1142 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1144 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1145 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1146 IEM_MC_ADVANCE_RIP();
1147 IEM_MC_END();
1148 }
1149 else
1150 {
1151 /*
1152 * Memory, register.
1153 */
1154 IEM_MC_BEGIN(0, 2);
1155 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1157
1158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1160 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1161 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1162
1163 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1164 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1165
1166 IEM_MC_ADVANCE_RIP();
1167 IEM_MC_END();
1168 }
1169 return VINF_SUCCESS;
1170}
1171
1172
1173/**
1174 * @opcode 0x11
1175 * @oppfx 0x66
1176 * @opcpuid sse2
1177 * @opgroup og_sse2_pcksclr_datamove
1178 * @opxcpttype 4UA
1179 * @optest op1=1 op2=2 -> op1=2
1180 * @optest op1=0 op2=-42 -> op1=-42
1181 */
1182FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1183{
1184 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1187 {
1188 /*
1189 * Register, register.
1190 */
1191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1192 IEM_MC_BEGIN(0, 0);
1193 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1194 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1195 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1196 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1197 IEM_MC_ADVANCE_RIP();
1198 IEM_MC_END();
1199 }
1200 else
1201 {
1202 /*
1203 * Memory, register.
1204 */
1205 IEM_MC_BEGIN(0, 2);
1206 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1208
1209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1211 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1212 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1213
1214 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1215 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1216
1217 IEM_MC_ADVANCE_RIP();
1218 IEM_MC_END();
1219 }
1220 return VINF_SUCCESS;
1221}
1222
1223
1224/**
1225 * @opcode 0x11
1226 * @oppfx 0xf3
1227 * @opcpuid sse
1228 * @opgroup og_sse_simdfp_datamove
1229 * @opxcpttype 5
1230 * @optest op1=1 op2=2 -> op1=2
1231 * @optest op1=0 op2=-22 -> op1=-22
1232 */
1233FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1234{
1235 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1237 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1238 {
1239 /*
1240 * Register, register.
1241 */
1242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1243 IEM_MC_BEGIN(0, 1);
1244 IEM_MC_LOCAL(uint32_t, uSrc);
1245
1246 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1248 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1249 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1250
1251 IEM_MC_ADVANCE_RIP();
1252 IEM_MC_END();
1253 }
1254 else
1255 {
1256 /*
1257 * Memory, register.
1258 */
1259 IEM_MC_BEGIN(0, 2);
1260 IEM_MC_LOCAL(uint32_t, uSrc);
1261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1262
1263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1265 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1266 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1267
1268 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1269 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1270
1271 IEM_MC_ADVANCE_RIP();
1272 IEM_MC_END();
1273 }
1274 return VINF_SUCCESS;
1275}
1276
1277
1278/**
1279 * @opcode 0x11
1280 * @oppfx 0xf2
1281 * @opcpuid sse2
1282 * @opgroup og_sse2_pcksclr_datamove
1283 * @opxcpttype 5
1284 * @optest op1=1 op2=2 -> op1=2
1285 * @optest op1=0 op2=-42 -> op1=-42
1286 */
1287FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1288{
1289 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1292 {
1293 /*
1294 * Register, register.
1295 */
1296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1297 IEM_MC_BEGIN(0, 1);
1298 IEM_MC_LOCAL(uint64_t, uSrc);
1299
1300 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1301 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1302 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1303 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1304
1305 IEM_MC_ADVANCE_RIP();
1306 IEM_MC_END();
1307 }
1308 else
1309 {
1310 /*
1311 * Memory, register.
1312 */
1313 IEM_MC_BEGIN(0, 2);
1314 IEM_MC_LOCAL(uint64_t, uSrc);
1315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1316
1317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1319 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1320 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1321
1322 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1323 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1324
1325 IEM_MC_ADVANCE_RIP();
1326 IEM_MC_END();
1327 }
1328 return VINF_SUCCESS;
1329}
1330
1331
1332FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1333{
1334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1335 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1336 {
1337 /**
1338 * @opcode 0x12
1339 * @opcodesub 11 mr/reg
1340 * @oppfx none
1341 * @opcpuid sse
1342 * @opgroup og_sse_simdfp_datamove
1343 * @opxcpttype 5
1344 * @optest op1=1 op2=2 -> op1=2
1345 * @optest op1=0 op2=-42 -> op1=-42
1346 */
1347 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1348
1349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1350 IEM_MC_BEGIN(0, 1);
1351 IEM_MC_LOCAL(uint64_t, uSrc);
1352
1353 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1354 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1355 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1356 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1357
1358 IEM_MC_ADVANCE_RIP();
1359 IEM_MC_END();
1360 }
1361 else
1362 {
1363 /**
1364 * @opdone
1365 * @opcode 0x12
1366 * @opcodesub !11 mr/reg
1367 * @oppfx none
1368 * @opcpuid sse
1369 * @opgroup og_sse_simdfp_datamove
1370 * @opxcpttype 5
1371 * @optest op1=1 op2=2 -> op1=2
1372 * @optest op1=0 op2=-42 -> op1=-42
1373 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1374 */
1375 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1376
1377 IEM_MC_BEGIN(0, 2);
1378 IEM_MC_LOCAL(uint64_t, uSrc);
1379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1380
1381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1385
1386 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1387 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1388
1389 IEM_MC_ADVANCE_RIP();
1390 IEM_MC_END();
1391 }
1392 return VINF_SUCCESS;
1393}
1394
1395
1396/**
1397 * @opcode 0x12
1398 * @opcodesub !11 mr/reg
1399 * @oppfx 0x66
1400 * @opcpuid sse2
1401 * @opgroup og_sse2_pcksclr_datamove
1402 * @opxcpttype 5
1403 * @optest op1=1 op2=2 -> op1=2
1404 * @optest op1=0 op2=-42 -> op1=-42
1405 */
1406FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1407{
1408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1409 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1410 {
1411 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1412
1413 IEM_MC_BEGIN(0, 2);
1414 IEM_MC_LOCAL(uint64_t, uSrc);
1415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1416
1417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1419 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1420 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1421
1422 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1423 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1424
1425 IEM_MC_ADVANCE_RIP();
1426 IEM_MC_END();
1427 return VINF_SUCCESS;
1428 }
1429
1430 /**
1431 * @opdone
1432 * @opmnemonic ud660f12m3
1433 * @opcode 0x12
1434 * @opcodesub 11 mr/reg
1435 * @oppfx 0x66
1436 * @opunused immediate
1437 * @opcpuid sse
1438 * @optest ->
1439 */
1440 return IEMOP_RAISE_INVALID_OPCODE();
1441}
1442
1443
1444/**
1445 * @opcode 0x12
1446 * @oppfx 0xf3
1447 * @opcpuid sse3
1448 * @opgroup og_sse3_pcksclr_datamove
1449 * @opxcpttype 4
1450 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1451 * op1=0x00000002000000020000000100000001
1452 */
1453FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1454{
1455 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1457 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1458 {
1459 /*
1460 * Register, register.
1461 */
1462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1463 IEM_MC_BEGIN(2, 0);
1464 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1465 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1466
1467 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1468 IEM_MC_PREPARE_SSE_USAGE();
1469
1470 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1471 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1472 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1473
1474 IEM_MC_ADVANCE_RIP();
1475 IEM_MC_END();
1476 }
1477 else
1478 {
1479 /*
1480 * Register, memory.
1481 */
1482 IEM_MC_BEGIN(2, 2);
1483 IEM_MC_LOCAL(RTUINT128U, uSrc);
1484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1485 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1486 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1487
1488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1490 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1491 IEM_MC_PREPARE_SSE_USAGE();
1492
1493 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1494 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1495 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1496
1497 IEM_MC_ADVANCE_RIP();
1498 IEM_MC_END();
1499 }
1500 return VINF_SUCCESS;
1501}
1502
1503
1504/**
1505 * @opcode 0x12
1506 * @oppfx 0xf2
1507 * @opcpuid sse3
1508 * @opgroup og_sse3_pcksclr_datamove
1509 * @opxcpttype 5
1510 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1511 * op1=0x22222222111111112222222211111111
1512 */
1513FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1514{
1515 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1517 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1518 {
1519 /*
1520 * Register, register.
1521 */
1522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1523 IEM_MC_BEGIN(2, 0);
1524 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1525 IEM_MC_ARG(uint64_t, uSrc, 1);
1526
1527 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1528 IEM_MC_PREPARE_SSE_USAGE();
1529
1530 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1531 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1532 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1533
1534 IEM_MC_ADVANCE_RIP();
1535 IEM_MC_END();
1536 }
1537 else
1538 {
1539 /*
1540 * Register, memory.
1541 */
1542 IEM_MC_BEGIN(2, 2);
1543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1544 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1545 IEM_MC_ARG(uint64_t, uSrc, 1);
1546
1547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1549 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1550 IEM_MC_PREPARE_SSE_USAGE();
1551
1552 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1553 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1554 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1555
1556 IEM_MC_ADVANCE_RIP();
1557 IEM_MC_END();
1558 }
1559 return VINF_SUCCESS;
1560}
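
/*
 * Editor's note - what the two duplicating moves above compute, shown per
 * lane (cf. the @optest vectors):
 *      movsldup: dst.au32[0] = dst.au32[1] = src.au32[0]; dst.au32[2] = dst.au32[3] = src.au32[2]
 *      movddup:  dst.au64[0] = dst.au64[1] = src.au64[0]
 */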
1561
1562
1563/**
1564 * @opcode 0x13
1565 * @opcodesub !11 mr/reg
1566 * @oppfx none
1567 * @opcpuid sse
1568 * @opgroup og_sse_simdfp_datamove
1569 * @opxcpttype 5
1570 * @optest op1=1 op2=2 -> op1=2
1571 * @optest op1=0 op2=-42 -> op1=-42
1572 */
1573FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1574{
1575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1576 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1577 {
1578 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1579
1580 IEM_MC_BEGIN(0, 2);
1581 IEM_MC_LOCAL(uint64_t, uSrc);
1582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1583
1584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1586 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1587 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1588
1589 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1590 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1591
1592 IEM_MC_ADVANCE_RIP();
1593 IEM_MC_END();
1594 return VINF_SUCCESS;
1595 }
1596
1597 /**
1598 * @opdone
1599 * @opmnemonic ud0f13m3
1600 * @opcode 0x13
1601 * @opcodesub 11 mr/reg
1602 * @oppfx none
1603 * @opunused immediate
1604 * @opcpuid sse
1605 * @optest ->
1606 */
1607 return IEMOP_RAISE_INVALID_OPCODE();
1608}
1609
1610
1611/**
1612 * @opcode 0x13
1613 * @opcodesub !11 mr/reg
1614 * @oppfx 0x66
1615 * @opcpuid sse2
1616 * @opgroup og_sse2_pcksclr_datamove
1617 * @opxcpttype 5
1618 * @optest op1=1 op2=2 -> op1=2
1619 * @optest op1=0 op2=-42 -> op1=-42
1620 */
1621FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1622{
1623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1624 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1625 {
1626 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1627 IEM_MC_BEGIN(0, 2);
1628 IEM_MC_LOCAL(uint64_t, uSrc);
1629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1630
1631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1633 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1634 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1635
1636 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1637 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1638
1639 IEM_MC_ADVANCE_RIP();
1640 IEM_MC_END();
1641 return VINF_SUCCESS;
1642 }
1643
1644 /**
1645 * @opdone
1646 * @opmnemonic ud660f13m3
1647 * @opcode 0x13
1648 * @opcodesub 11 mr/reg
1649 * @oppfx 0x66
1650 * @opunused immediate
1651 * @opcpuid sse
1652 * @optest ->
1653 */
1654 return IEMOP_RAISE_INVALID_OPCODE();
1655}
1656
1657
1658/**
1659 * @opmnemonic udf30f13
1660 * @opcode 0x13
1661 * @oppfx 0xf3
1662 * @opunused intel-modrm
1663 * @opcpuid sse
1664 * @optest ->
1665 * @opdone
1666 */
1667
1668/**
1669 * @opmnemonic udf20f13
1670 * @opcode 0x13
1671 * @oppfx 0xf2
1672 * @opunused intel-modrm
1673 * @opcpuid sse
1674 * @optest ->
1675 * @opdone
1676 */
1677
1678/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
1679FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1680/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1681FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1682
1683/**
1684 * @opdone
1685 * @opmnemonic udf30f14
1686 * @opcode 0x14
1687 * @oppfx 0xf3
1688 * @opunused intel-modrm
1689 * @opcpuid sse
1690 * @optest ->
1691 * @opdone
1692 */
1693
1694/**
1695 * @opmnemonic udf20f14
1696 * @opcode 0x14
1697 * @oppfx 0xf2
1698 * @opunused intel-modrm
1699 * @opcpuid sse
1700 * @optest ->
1701 * @opdone
1702 */
1703
1704/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1705FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1706/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1707FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1708/* Opcode 0xf3 0x0f 0x15 - invalid */
1709/* Opcode 0xf2 0x0f 0x15 - invalid */
1710
1711/**
1712 * @opdone
1713 * @opmnemonic udf30f15
1714 * @opcode 0x15
1715 * @oppfx 0xf3
1716 * @opunused intel-modrm
1717 * @opcpuid sse
1718 * @optest ->
1719 * @opdone
1720 */
1721
1722/**
1723 * @opmnemonic udf20f15
1724 * @opcode 0x15
1725 * @oppfx 0xf2
1726 * @opunused intel-modrm
1727 * @opcpuid sse
1728 * @optest ->
1729 * @opdone
1730 */
1731
1732FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1733{
1734 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1735 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1736 {
1737 /**
1738 * @opcode 0x16
1739 * @opcodesub 11 mr/reg
1740 * @oppfx none
1741 * @opcpuid sse
1742 * @opgroup og_sse_simdfp_datamove
1743 * @opxcpttype 5
1744 * @optest op1=1 op2=2 -> op1=2
1745 * @optest op1=0 op2=-42 -> op1=-42
1746 */
1747 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1748
1749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1750 IEM_MC_BEGIN(0, 1);
1751 IEM_MC_LOCAL(uint64_t, uSrc);
1752
1753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1754 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1755 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1756 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1757
1758 IEM_MC_ADVANCE_RIP();
1759 IEM_MC_END();
1760 }
1761 else
1762 {
1763 /**
1764 * @opdone
1765 * @opcode 0x16
1766 * @opcodesub !11 mr/reg
1767 * @oppfx none
1768 * @opcpuid sse
1769 * @opgroup og_sse_simdfp_datamove
1770 * @opxcpttype 5
1771 * @optest op1=1 op2=2 -> op1=2
1772 * @optest op1=0 op2=-42 -> op1=-42
1773 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1774 */
1775 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1776
1777 IEM_MC_BEGIN(0, 2);
1778 IEM_MC_LOCAL(uint64_t, uSrc);
1779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1780
1781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1783 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1784 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1785
1786 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1787 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1788
1789 IEM_MC_ADVANCE_RIP();
1790 IEM_MC_END();
1791 }
1792 return VINF_SUCCESS;
1793}
1794
1795
1796/**
1797 * @opcode 0x16
1798 * @opcodesub !11 mr/reg
1799 * @oppfx 0x66
1800 * @opcpuid sse2
1801 * @opgroup og_sse2_pcksclr_datamove
1802 * @opxcpttype 5
1803 * @optest op1=1 op2=2 -> op1=2
1804 * @optest op1=0 op2=-42 -> op1=-42
1805 */
1806FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1807{
1808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1809 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1810 {
1811 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1812 IEM_MC_BEGIN(0, 2);
1813 IEM_MC_LOCAL(uint64_t, uSrc);
1814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1815
1816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1818 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1819 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1820
1821 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1822 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1823
1824 IEM_MC_ADVANCE_RIP();
1825 IEM_MC_END();
1826 return VINF_SUCCESS;
1827 }
1828
1829 /**
1830 * @opdone
1831 * @opmnemonic ud660f16m3
1832 * @opcode 0x16
1833 * @opcodesub 11 mr/reg
1834 * @oppfx 0x66
1835 * @opunused immediate
1836 * @opcpuid sse
1837 * @optest ->
1838 */
1839 return IEMOP_RAISE_INVALID_OPCODE();
1840}
1841
1842
1843/**
1844 * @opcode 0x16
1845 * @oppfx 0xf3
1846 * @opcpuid sse3
1847 * @opgroup og_sse3_pcksclr_datamove
1848 * @opxcpttype 4
1849 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1850 * op1=0x00000002000000020000000100000001
1851 */
1852FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1853{
1854 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1857 {
1858 /*
1859 * Register, register.
1860 */
1861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1862 IEM_MC_BEGIN(2, 0);
1863 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1864 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1865
1866 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1867 IEM_MC_PREPARE_SSE_USAGE();
1868
1869 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1870 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1871 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1872
1873 IEM_MC_ADVANCE_RIP();
1874 IEM_MC_END();
1875 }
1876 else
1877 {
1878 /*
1879 * Register, memory.
1880 */
1881 IEM_MC_BEGIN(2, 2);
1882 IEM_MC_LOCAL(RTUINT128U, uSrc);
1883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1884 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1885 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1886
1887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1889 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1890 IEM_MC_PREPARE_SSE_USAGE();
1891
1892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1893 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1894 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1895
1896 IEM_MC_ADVANCE_RIP();
1897 IEM_MC_END();
1898 }
1899 return VINF_SUCCESS;
1900}
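
/* Worked illustration of the semantics (cf. the @optest above): movshdup
   copies the high (odd-indexed) dword of each qword pair into both dwords of
   that pair, so 0x00000002dddddddd00000001eeeeeeee yields
   0x00000002000000020000000100000001. */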
1901
1902/**
1903 * @opdone
1904 * @opmnemonic udf30f16
1905 * @opcode 0x16
1906 * @oppfx 0xf2
1907 * @opunused intel-modrm
1908 * @opcpuid sse
1909 * @optest ->
1910 * @opdone
1911 */
1912
1913
1914/**
1915 * @opcode 0x17
1916 * @opcodesub !11 mr/reg
1917 * @oppfx none
1918 * @opcpuid sse
1919 * @opgroup og_sse_simdfp_datamove
1920 * @opxcpttype 5
1921 * @optest op1=1 op2=2 -> op1=2
1922 * @optest op1=0 op2=-42 -> op1=-42
1923 */
1924FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
1925{
1926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1927 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1928 {
1929 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1930
1931 IEM_MC_BEGIN(0, 2);
1932 IEM_MC_LOCAL(uint64_t, uSrc);
1933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1934
1935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1938 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1939
1940 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1941 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1942
1943 IEM_MC_ADVANCE_RIP();
1944 IEM_MC_END();
1945 return VINF_SUCCESS;
1946 }
1947
1948 /**
1949 * @opdone
1950 * @opmnemonic ud0f17m3
1951 * @opcode 0x17
1952 * @opcodesub 11 mr/reg
1953 * @oppfx none
1954 * @opunused immediate
1955 * @opcpuid sse
1956 * @optest ->
1957 */
1958 return IEMOP_RAISE_INVALID_OPCODE();
1959}
1960
1961
1962/**
1963 * @opcode 0x17
1964 * @opcodesub !11 mr/reg
1965 * @oppfx 0x66
1966 * @opcpuid sse2
1967 * @opgroup og_sse2_pcksclr_datamove
1968 * @opxcpttype 5
1969 * @optest op1=1 op2=2 -> op1=2
1970 * @optest op1=0 op2=-42 -> op1=-42
1971 */
1972FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
1973{
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1976 {
1977 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1978
1979 IEM_MC_BEGIN(0, 2);
1980 IEM_MC_LOCAL(uint64_t, uSrc);
1981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1982
1983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1985 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1986 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1987
1988 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1989 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1990
1991 IEM_MC_ADVANCE_RIP();
1992 IEM_MC_END();
1993 return VINF_SUCCESS;
1994 }
1995
1996 /**
1997 * @opdone
1998 * @opmnemonic ud660f17m3
1999 * @opcode 0x17
2000 * @opcodesub 11 mr/reg
2001 * @oppfx 0x66
2002 * @opunused immediate
2003 * @opcpuid sse
2004 * @optest ->
2005 */
2006 return IEMOP_RAISE_INVALID_OPCODE();
2007}
2008
2009
2010/**
2011 * @opdone
2012 * @opmnemonic udf30f17
2013 * @opcode 0x17
2014 * @oppfx 0xf3
2015 * @opunused intel-modrm
2016 * @opcpuid sse
2017 * @optest ->
2018 * @opdone
2019 */
2020
2021/**
2022 * @opmnemonic udf20f17
2023 * @opcode 0x17
2024 * @oppfx 0xf2
2025 * @opunused intel-modrm
2026 * @opcpuid sse
2027 * @optest ->
2028 * @opdone
2029 */
2030
2031
2032/** Opcode 0x0f 0x18. */
2033FNIEMOP_DEF(iemOp_prefetch_Grp16)
2034{
2035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2036 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2037 {
2038 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2039 {
2040 case 4: /* Aliased to /0 for the time being according to AMD. */
2041 case 5: /* Aliased to /0 for the time being according to AMD. */
2042 case 6: /* Aliased to /0 for the time being according to AMD. */
2043 case 7: /* Aliased to /0 for the time being according to AMD. */
2044 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2045 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2046 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2047 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2048 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2049 }
2050
2051 IEM_MC_BEGIN(0, 1);
2052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2055 /* Currently a NOP. */
2056 NOREF(GCPtrEffSrc);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 return VINF_SUCCESS;
2060 }
2061
2062 return IEMOP_RAISE_INVALID_OPCODE();
2063}
2064
2065
2066/** Opcode 0x0f 0x19..0x1f. */
2067FNIEMOP_DEF(iemOp_nop_Ev)
2068{
2069 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2072 {
2073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2074 IEM_MC_BEGIN(0, 0);
2075 IEM_MC_ADVANCE_RIP();
2076 IEM_MC_END();
2077 }
2078 else
2079 {
2080 IEM_MC_BEGIN(0, 1);
2081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2084 /* Currently a NOP. */
2085 NOREF(GCPtrEffSrc);
2086 IEM_MC_ADVANCE_RIP();
2087 IEM_MC_END();
2088 }
2089 return VINF_SUCCESS;
2090}
2091
2092
2093/** Opcode 0x0f 0x20. */
2094FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2095{
2096 /* mod is ignored, as are operand size overrides. */
2097 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2098 IEMOP_HLP_MIN_386();
2099 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2100 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2101 else
2102 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2103
2104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2105 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2106 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2107 {
2108 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2109 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2110 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2111 iCrReg |= 8;
2112 }
2113 switch (iCrReg)
2114 {
2115 case 0: case 2: case 3: case 4: case 8:
2116 break;
2117 default:
2118 return IEMOP_RAISE_INVALID_OPCODE();
2119 }
2120 IEMOP_HLP_DONE_DECODING();
2121
2122 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2123}
2124
2125
2126/** Opcode 0x0f 0x21. */
2127FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2128{
2129 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2130 IEMOP_HLP_MIN_386();
2131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2133 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2134 return IEMOP_RAISE_INVALID_OPCODE();
2135 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2136 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2137 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2138}
2139
2140
2141/** Opcode 0x0f 0x22. */
2142FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2143{
2144 /* mod is ignored, as are operand size overrides. */
2145 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2146 IEMOP_HLP_MIN_386();
2147 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2148 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2149 else
2150 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2151
2152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2153 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2154 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2155 {
2156 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2157 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2158 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2159 iCrReg |= 8;
2160 }
2161 switch (iCrReg)
2162 {
2163 case 0: case 2: case 3: case 4: case 8:
2164 break;
2165 default:
2166 return IEMOP_RAISE_INVALID_OPCODE();
2167 }
2168 IEMOP_HLP_DONE_DECODING();
2169
2170 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2171}
2172
2173
2174/** Opcode 0x0f 0x23. */
2175FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2176{
2177 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2178 IEMOP_HLP_MIN_386();
2179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2181 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2182 return IEMOP_RAISE_INVALID_OPCODE();
2183 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2184 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2185 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2186}
2187
2188
2189/** Opcode 0x0f 0x24. */
2190FNIEMOP_DEF(iemOp_mov_Rd_Td)
2191{
2192 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2193 /** @todo works on 386 and 486. */
2194 /* The RM byte is not considered, see testcase. */
2195 return IEMOP_RAISE_INVALID_OPCODE();
2196}
2197
2198
2199/** Opcode 0x0f 0x26. */
2200FNIEMOP_DEF(iemOp_mov_Td_Rd)
2201{
2202 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2203 /** @todo works on 386 and 486. */
2204 /* The RM byte is not considered, see testcase. */
2205 return IEMOP_RAISE_INVALID_OPCODE();
2206}
2207
2208
2209/**
2210 * @opcode 0x28
2211 * @oppfx none
2212 * @opcpuid sse
2213 * @opgroup og_sse_simdfp_datamove
2214 * @opxcpttype 1
2215 * @optest op1=1 op2=2 -> op1=2
2216 * @optest op1=0 op2=-42 -> op1=-42
2217 */
2218FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2219{
2220 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2222 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2228 IEM_MC_BEGIN(0, 0);
2229 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2231 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2232 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2233 IEM_MC_ADVANCE_RIP();
2234 IEM_MC_END();
2235 }
2236 else
2237 {
2238 /*
2239 * Register, memory.
2240 */
2241 IEM_MC_BEGIN(0, 2);
2242 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2244
2245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2248 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2249
2250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2251 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2252
2253 IEM_MC_ADVANCE_RIP();
2254 IEM_MC_END();
2255 }
2256 return VINF_SUCCESS;
2257}
2258
2259/**
2260 * @opcode 0x28
2261 * @oppfx 66
2262 * @opcpuid sse2
2263 * @opgroup og_sse2_pcksclr_datamove
2264 * @opxcpttype 1
2265 * @optest op1=1 op2=2 -> op1=2
2266 * @optest op1=0 op2=-42 -> op1=-42
2267 */
2268FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2269{
2270 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2272 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2273 {
2274 /*
2275 * Register, register.
2276 */
2277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2278 IEM_MC_BEGIN(0, 0);
2279 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2280 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2281 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2282 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2283 IEM_MC_ADVANCE_RIP();
2284 IEM_MC_END();
2285 }
2286 else
2287 {
2288 /*
2289 * Register, memory.
2290 */
2291 IEM_MC_BEGIN(0, 2);
2292 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2294
2295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2297 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2298 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2299
2300 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2301 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2302
2303 IEM_MC_ADVANCE_RIP();
2304 IEM_MC_END();
2305 }
2306 return VINF_SUCCESS;
2307}
2308
2309/* Opcode 0xf3 0x0f 0x28 - invalid */
2310/* Opcode 0xf2 0x0f 0x28 - invalid */
2311
2312/**
2313 * @opcode 0x29
2314 * @oppfx none
2315 * @opcpuid sse
2316 * @opgroup og_sse_simdfp_datamove
2317 * @opxcpttype 1
2318 * @optest op1=1 op2=2 -> op1=2
2319 * @optest op1=0 op2=-42 -> op1=-42
2320 */
2321FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2322{
2323 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2326 {
2327 /*
2328 * Register, register.
2329 */
2330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2331 IEM_MC_BEGIN(0, 0);
2332 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2333 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2334 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2335 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2336 IEM_MC_ADVANCE_RIP();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 /*
2342 * Memory, register.
2343 */
2344 IEM_MC_BEGIN(0, 2);
2345 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2347
2348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2351 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2352
2353 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2354 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2355
2356 IEM_MC_ADVANCE_RIP();
2357 IEM_MC_END();
2358 }
2359 return VINF_SUCCESS;
2360}
2361
2362/**
2363 * @opcode 0x29
2364 * @oppfx 66
2365 * @opcpuid sse2
2366 * @opgroup og_sse2_pcksclr_datamove
2367 * @opxcpttype 1
2368 * @optest op1=1 op2=2 -> op1=2
2369 * @optest op1=0 op2=-42 -> op1=-42
2370 */
2371FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2372{
2373 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2376 {
2377 /*
2378 * Register, register.
2379 */
2380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2381 IEM_MC_BEGIN(0, 0);
2382 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2385 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2386 IEM_MC_ADVANCE_RIP();
2387 IEM_MC_END();
2388 }
2389 else
2390 {
2391 /*
2392 * Memory, register.
2393 */
2394 IEM_MC_BEGIN(0, 2);
2395 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2397
2398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2400 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2402
2403 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2404 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2405
2406 IEM_MC_ADVANCE_RIP();
2407 IEM_MC_END();
2408 }
2409 return VINF_SUCCESS;
2410}
2411
2412/* Opcode 0xf3 0x0f 0x29 - invalid */
2413/* Opcode 0xf2 0x0f 0x29 - invalid */
2414
2415
2416/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2417FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2418/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2419FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2420/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2421FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2422/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2423FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2424
2425
2426/**
2427 * @opcode 0x2b
2428 * @opcodesub !11 mr/reg
2429 * @oppfx none
2430 * @opcpuid sse
2431 * @opgroup og_sse1_cachect
2432 * @opxcpttype 1
2433 * @optest op1=1 op2=2 -> op1=2
2434 * @optest op1=0 op2=-42 -> op1=-42
2435 */
2436FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2437{
2438 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2440 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2441 {
2442 /*
2443 * memory, register.
2444 */
2445 IEM_MC_BEGIN(0, 2);
2446 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2448
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2453
2454 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2455 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2456
2457 IEM_MC_ADVANCE_RIP();
2458 IEM_MC_END();
2459 }
2460 /* The register, register encoding is invalid. */
2461 else
2462 return IEMOP_RAISE_INVALID_OPCODE();
2463 return VINF_SUCCESS;
2464}
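
/* Note: movntps is emulated as an ordinary aligned 128-bit store; the
   non-temporal hint only affects cache/write-combining behaviour on real
   hardware, so nothing is lost by ignoring it here. The same applies to the
   movntpd variant below. */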
2465
2466/**
2467 * @opcode 0x2b
2468 * @opcodesub !11 mr/reg
2469 * @oppfx 0x66
2470 * @opcpuid sse2
2471 * @opgroup og_sse2_cachect
2472 * @opxcpttype 1
2473 * @optest op1=1 op2=2 -> op1=2
2474 * @optest op1=0 op2=-42 -> op1=-42
2475 */
2476FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2477{
2478 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2480 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2481 {
2482 /*
2483 * memory, register.
2484 */
2485 IEM_MC_BEGIN(0, 2);
2486 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2488
2489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2491 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2493
2494 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2495 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2496
2497 IEM_MC_ADVANCE_RIP();
2498 IEM_MC_END();
2499 }
2500 /* The register, register encoding is invalid. */
2501 else
2502 return IEMOP_RAISE_INVALID_OPCODE();
2503 return VINF_SUCCESS;
2504}
2505/* Opcode 0xf3 0x0f 0x2b - invalid */
2506/* Opcode 0xf2 0x0f 0x2b - invalid */
2507
2508
2509/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2510FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2511/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2512FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2513/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2514FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2515/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2516FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2517
2518/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2519FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2520/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2521FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2522/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2523FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2524/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2525FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2526
2527/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2528FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2529/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2530FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2531/* Opcode 0xf3 0x0f 0x2e - invalid */
2532/* Opcode 0xf2 0x0f 0x2e - invalid */
2533
2534/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2535FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2536/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2537FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2538/* Opcode 0xf3 0x0f 0x2f - invalid */
2539/* Opcode 0xf2 0x0f 0x2f - invalid */
2540
2541/** Opcode 0x0f 0x30. */
2542FNIEMOP_DEF(iemOp_wrmsr)
2543{
2544 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2546 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2547}
2548
2549
2550/** Opcode 0x0f 0x31. */
2551FNIEMOP_DEF(iemOp_rdtsc)
2552{
2553 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2555 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2556}
2557
2558
2559/** Opcode 0x0f 0x32. */
2560FNIEMOP_DEF(iemOp_rdmsr)
2561{
2562 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2564 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2565}
2566
2567
2568/** Opcode 0x0f 0x33. */
2569FNIEMOP_DEF(iemOp_rdpmc)
2570{
2571 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2573 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2574}
2575
2576
2577/** Opcode 0x0f 0x34. */
2578FNIEMOP_STUB(iemOp_sysenter);
2579/** Opcode 0x0f 0x35. */
2580FNIEMOP_STUB(iemOp_sysexit);
2581/** Opcode 0x0f 0x37. */
2582FNIEMOP_STUB(iemOp_getsec);
2583
2584
2585/** Opcode 0x0f 0x38. */
2586FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2587{
2588#ifdef IEM_WITH_THREE_0F_38
2589 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2590 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2591#else
2592 IEMOP_BITCH_ABOUT_STUB();
2593 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2594#endif
2595}
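
/* Layout assumption for the three-byte tables: four entries per opcode byte,
   one for each mandatory-prefix variant (none, 0x66, 0xf3, 0xf2), which is
   what the (uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix indexing relies on. */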
2596
2597
2598/** Opcode 0x0f 0x3a. */
2599FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2600{
2601#ifdef IEM_WITH_THREE_0F_3A
2602 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2603 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2604#else
2605 IEMOP_BITCH_ABOUT_STUB();
2606 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2607#endif
2608}
2609
2610
2611/**
2612 * Implements a conditional move.
2613 *
2614 * Wish there were an obvious way to do this that would let us share the
2615 * code and reduce bloat.
2616 *
2617 * @param a_Cnd The conditional "microcode" operation.
2618 */
2619#define CMOV_X(a_Cnd) \
2620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2622 { \
2623 switch (pVCpu->iem.s.enmEffOpSize) \
2624 { \
2625 case IEMMODE_16BIT: \
2626 IEM_MC_BEGIN(0, 1); \
2627 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2628 a_Cnd { \
2629 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2630 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2631 } IEM_MC_ENDIF(); \
2632 IEM_MC_ADVANCE_RIP(); \
2633 IEM_MC_END(); \
2634 return VINF_SUCCESS; \
2635 \
2636 case IEMMODE_32BIT: \
2637 IEM_MC_BEGIN(0, 1); \
2638 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2639 a_Cnd { \
2640 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2641 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2642 } IEM_MC_ELSE() { \
2643 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2644 } IEM_MC_ENDIF(); \
2645 IEM_MC_ADVANCE_RIP(); \
2646 IEM_MC_END(); \
2647 return VINF_SUCCESS; \
2648 \
2649 case IEMMODE_64BIT: \
2650 IEM_MC_BEGIN(0, 1); \
2651 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2652 a_Cnd { \
2653 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2654 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2655 } IEM_MC_ENDIF(); \
2656 IEM_MC_ADVANCE_RIP(); \
2657 IEM_MC_END(); \
2658 return VINF_SUCCESS; \
2659 \
2660 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2661 } \
2662 } \
2663 else \
2664 { \
2665 switch (pVCpu->iem.s.enmEffOpSize) \
2666 { \
2667 case IEMMODE_16BIT: \
2668 IEM_MC_BEGIN(0, 2); \
2669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2670 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2672 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2673 a_Cnd { \
2674 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2675 } IEM_MC_ENDIF(); \
2676 IEM_MC_ADVANCE_RIP(); \
2677 IEM_MC_END(); \
2678 return VINF_SUCCESS; \
2679 \
2680 case IEMMODE_32BIT: \
2681 IEM_MC_BEGIN(0, 2); \
2682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2683 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2685 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2686 a_Cnd { \
2687 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2688 } IEM_MC_ELSE() { \
2689 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2690 } IEM_MC_ENDIF(); \
2691 IEM_MC_ADVANCE_RIP(); \
2692 IEM_MC_END(); \
2693 return VINF_SUCCESS; \
2694 \
2695 case IEMMODE_64BIT: \
2696 IEM_MC_BEGIN(0, 2); \
2697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2698 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2700 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2701 a_Cnd { \
2702 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2703 } IEM_MC_ENDIF(); \
2704 IEM_MC_ADVANCE_RIP(); \
2705 IEM_MC_END(); \
2706 return VINF_SUCCESS; \
2707 \
2708 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2709 } \
2710 } do {} while (0)
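
/* Example: CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)) produces the cmove body
   used below. Note that in the 32-bit operand size cases the ELSE branch
   still clears the high half of the 64-bit destination, matching the
   architectural rule that a 32-bit cmov always writes (and thus
   zero-extends) its destination register. */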
2711
2712
2713
2714/** Opcode 0x0f 0x40. */
2715FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2716{
2717 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2718 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2719}
2720
2721
2722/** Opcode 0x0f 0x41. */
2723FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2724{
2725 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2726 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2727}
2728
2729
2730/** Opcode 0x0f 0x42. */
2731FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2732{
2733 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2734 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2735}
2736
2737
2738/** Opcode 0x0f 0x43. */
2739FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2740{
2741 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2742 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2743}
2744
2745
2746/** Opcode 0x0f 0x44. */
2747FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2748{
2749 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2750 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2751}
2752
2753
2754/** Opcode 0x0f 0x45. */
2755FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2756{
2757 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2758 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2759}
2760
2761
2762/** Opcode 0x0f 0x46. */
2763FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2764{
2765 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2766 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2767}
2768
2769
2770/** Opcode 0x0f 0x47. */
2771FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2772{
2773 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2774 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2775}
2776
2777
2778/** Opcode 0x0f 0x48. */
2779FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2780{
2781 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2782 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2783}
2784
2785
2786/** Opcode 0x0f 0x49. */
2787FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2788{
2789 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2790 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2791}
2792
2793
2794/** Opcode 0x0f 0x4a. */
2795FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2796{
2797 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2798 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2799}
2800
2801
2802/** Opcode 0x0f 0x4b. */
2803FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2804{
2805 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2806 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2807}
2808
2809
2810/** Opcode 0x0f 0x4c. */
2811FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2812{
2813 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2814 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2815}
2816
2817
2818/** Opcode 0x0f 0x4d. */
2819FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2820{
2821 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2822 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2823}
2824
2825
2826/** Opcode 0x0f 0x4e. */
2827FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2828{
2829 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2830 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2831}
2832
2833
2834/** Opcode 0x0f 0x4f. */
2835FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2836{
2837 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2838 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2839}
2840
2841#undef CMOV_X
2842
2843/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2844FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2845/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2846FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2847/* Opcode 0xf3 0x0f 0x50 - invalid */
2848/* Opcode 0xf2 0x0f 0x50 - invalid */
2849
2850/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2851FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2852/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2853FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2854/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2855FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2856/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2857FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2858
2859/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2860FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2861/* Opcode 0x66 0x0f 0x52 - invalid */
2862/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2863FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2864/* Opcode 0xf2 0x0f 0x52 - invalid */
2865
2866/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2867FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2868/* Opcode 0x66 0x0f 0x53 - invalid */
2869/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2870FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2871/* Opcode 0xf2 0x0f 0x53 - invalid */
2872
2873/** Opcode 0x0f 0x54 - andps Vps, Wps */
2874FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2875/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2876FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2877/* Opcode 0xf3 0x0f 0x54 - invalid */
2878/* Opcode 0xf2 0x0f 0x54 - invalid */
2879
2880/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2881FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2882/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2883FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2884/* Opcode 0xf3 0x0f 0x55 - invalid */
2885/* Opcode 0xf2 0x0f 0x55 - invalid */
2886
2887/** Opcode 0x0f 0x56 - orps Vps, Wps */
2888FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2889/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2890FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2891/* Opcode 0xf3 0x0f 0x56 - invalid */
2892/* Opcode 0xf2 0x0f 0x56 - invalid */
2893
2894/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2895FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2896/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2897FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2898/* Opcode 0xf3 0x0f 0x57 - invalid */
2899/* Opcode 0xf2 0x0f 0x57 - invalid */
2900
2901/** Opcode 0x0f 0x58 - addps Vps, Wps */
2902FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2903/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2904FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2905/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2906FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2907/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2908FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2909
2910/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2911FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2912/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2913FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2914/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2915FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2916/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2917FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2918
2919/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2920FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2921/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2922FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2923/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2924FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2925/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2926FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2927
2928/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2929FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2930/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2931FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2932/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2933FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2934/* Opcode 0xf2 0x0f 0x5b - invalid */
2935
2936/** Opcode 0x0f 0x5c - subps Vps, Wps */
2937FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2938/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2939FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2940/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2941FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2942/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2943FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2944
2945/** Opcode 0x0f 0x5d - minps Vps, Wps */
2946FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2947/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2948FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2949/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2950FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2951/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2952FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2953
2954/** Opcode 0x0f 0x5e - divps Vps, Wps */
2955FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2958/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2959FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2960/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2961FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2962
2963/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2964FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2965/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2966FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2967/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2968FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2969/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2970FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2971
2972/**
2973 * Common worker for SSE2 instructions on the forms:
2974 * pxxxx xmm1, xmm2/mem128
2975 *
2976 * The 2nd operand is the first half of a register, which in the memory case
2977 * means a 128-bit aligned 64-bit memory access (only the low quadword of the
2978 * source is read).
2979 *
2980 * Exceptions type 4.
2981 */
2982FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2983{
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2986 {
2987 /*
2988 * Register, register.
2989 */
2990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2991 IEM_MC_BEGIN(2, 0);
2992 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2993 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2995 IEM_MC_PREPARE_SSE_USAGE();
2996 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2997 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2998 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2999 IEM_MC_ADVANCE_RIP();
3000 IEM_MC_END();
3001 }
3002 else
3003 {
3004 /*
3005 * Register, memory.
3006 */
3007 IEM_MC_BEGIN(2, 2);
3008 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3009 IEM_MC_LOCAL(uint64_t, uSrc);
3010 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3012
3013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3015 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3016 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3017
3018 IEM_MC_PREPARE_SSE_USAGE();
3019 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3020 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3021
3022 IEM_MC_ADVANCE_RIP();
3023 IEM_MC_END();
3024 }
3025 return VINF_SUCCESS;
3026}
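
/* Note on register decoding in these workers: the effective indices are
   formed as ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) |
   pVCpu->iem.s.uRexReg and (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
   i.e. the REX.R/REX.B bits are kept pre-shifted into bit 3 so XMM indices
   extend to 0..15 in 64-bit mode; the MMX paths use only the raw three
   ModR/M bits (see the REX @todo testcases below). */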
3027
3028
3029/**
3030 * Common worker for MMX instructions on the forms:
3031 * pxxxx mm1, mm2/mem32
3032 *
3033 * The 2nd operand is the first half of a register, which in the memory case
3034 * means a 32-bit memory access (only the low doubleword of the source is
3035 * read).
3036 *
3037 * Exceptions type 4.
3038 */
3039FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3040{
3041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3042 if (!pImpl->pfnU64)
3043 return IEMOP_RAISE_INVALID_OPCODE();
3044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3045 {
3046 /*
3047 * Register, register.
3048 */
3049 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3050 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3052 IEM_MC_BEGIN(2, 0);
3053 IEM_MC_ARG(uint64_t *, pDst, 0);
3054 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3055 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3056 IEM_MC_PREPARE_FPU_USAGE();
3057 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3058 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3059 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3060 IEM_MC_ADVANCE_RIP();
3061 IEM_MC_END();
3062 }
3063 else
3064 {
3065 /*
3066 * Register, memory.
3067 */
3068 IEM_MC_BEGIN(2, 2);
3069 IEM_MC_ARG(uint64_t *, pDst, 0);
3070 IEM_MC_LOCAL(uint32_t, uSrc);
3071 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3073
3074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3077 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3078
3079 IEM_MC_PREPARE_FPU_USAGE();
3080 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3081 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3082
3083 IEM_MC_ADVANCE_RIP();
3084 IEM_MC_END();
3085 }
3086 return VINF_SUCCESS;
3087}
3088
3089
3090/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3091FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3092{
3093 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3094 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3095}
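
/* Worked example per the Intel SDM definition of PUNPCKLBW: with
   mm1=0x7766554433221100 and the 32-bit source 0xbbaa9988, the low bytes are
   interleaved to give mm1=0xbb33aa2299118800. */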
3096
3097/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3098FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3099{
3100 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3101 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3102}
3103
3104/* Opcode 0xf3 0x0f 0x60 - invalid */
3105
3106
3107/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3108FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3109{
3110 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
3111 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3112}
3113
3114/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3115FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3116{
3117 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3118 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3119}
3120
3121/* Opcode 0xf3 0x0f 0x61 - invalid */
3122
3123
3124/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3125FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3126{
3127 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3128 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3129}
3130
3131/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3132FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3133{
3134 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3135 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3136}
3137
3138/* Opcode 0xf3 0x0f 0x62 - invalid */
3139
3140
3141
3142/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3143FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3144/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3145FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3146/* Opcode 0xf3 0x0f 0x63 - invalid */
3147
3148/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3149FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3150/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3151FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3152/* Opcode 0xf3 0x0f 0x64 - invalid */
3153
3154/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3155FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3156/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3157FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3158/* Opcode 0xf3 0x0f 0x65 - invalid */
3159
3160/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3161FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3162/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3163FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3164/* Opcode 0xf3 0x0f 0x66 - invalid */
3165
3166/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3167FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3168/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3169FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3170/* Opcode 0xf3 0x0f 0x67 - invalid */
3171
3172
3173/**
3174 * Common worker for MMX instructions on the form:
3175 * pxxxx mm1, mm2/mem64
3176 *
3177 * The 2nd operand is the second half of a register, which in the memory case
3178 * means a 64-bit memory access.
3180 *
3181 * Exceptions type 4.
3182 */
3183FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3184{
3185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3186 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3188 {
3189 /*
3190 * Register, register.
3191 */
3192 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3193 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_BEGIN(2, 0);
3196 IEM_MC_ARG(uint64_t *, pDst, 0);
3197 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3198 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3199 IEM_MC_PREPARE_FPU_USAGE();
3200 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3201 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3202 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3203 IEM_MC_ADVANCE_RIP();
3204 IEM_MC_END();
3205 }
3206 else
3207 {
3208 /*
3209 * Register, memory.
3210 */
3211 IEM_MC_BEGIN(2, 2);
3212 IEM_MC_ARG(uint64_t *, pDst, 0);
3213 IEM_MC_LOCAL(uint64_t, uSrc);
3214 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3216
3217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3221
3222 IEM_MC_PREPARE_FPU_USAGE();
3223 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3224 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3225
3226 IEM_MC_ADVANCE_RIP();
3227 IEM_MC_END();
3228 }
3229 return VINF_SUCCESS;
3230}
3231
3232
3233/**
3234 * Common worker for SSE2 instructions on the form:
3235 * pxxxx xmm1, xmm2/mem128
3236 *
3237 * The 2nd operand is the second half of a register, which in the memory case
3238 * means a 128-bit aligned access; the implementation may read the full
3239 * 128 bits or only the upper 64 bits.
3240 *
3241 * Exceptions type 4.
3242 */
3243FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3244{
3245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3246 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3247 {
3248 /*
3249 * Register, register.
3250 */
3251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3252 IEM_MC_BEGIN(2, 0);
3253 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3254 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3255 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3256 IEM_MC_PREPARE_SSE_USAGE();
3257 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3258 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3259 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3260 IEM_MC_ADVANCE_RIP();
3261 IEM_MC_END();
3262 }
3263 else
3264 {
3265 /*
3266 * Register, memory.
3267 */
3268 IEM_MC_BEGIN(2, 2);
3269 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3270 IEM_MC_LOCAL(RTUINT128U, uSrc);
3271 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3273
3274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3277 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3278
3279 IEM_MC_PREPARE_SSE_USAGE();
3280 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3281 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3282
3283 IEM_MC_ADVANCE_RIP();
3284 IEM_MC_END();
3285 }
3286 return VINF_SUCCESS;
3287}
3288
3289
3290/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3291FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3292{
3293 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3294 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3295}
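
/* Worked example per the Intel SDM definition of PUNPCKHBW: with
   mm1=0x7766554433221100 and mm2/m64=0xffeeddccbbaa9988, the high-order
   bytes of the two operands are interleaved to give mm1=0xff77ee66dd55cc44. */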
3296
3297/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3298FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3299{
3300 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3301 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3302}
3303/* Opcode 0xf3 0x0f 0x68 - invalid */
3304
3305
3306/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3307FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3308{
3309 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3310 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3311}
3312
3313/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3314FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3315{
3316 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3317 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3319}
3320/* Opcode 0xf3 0x0f 0x69 - invalid */
3321
3322
3323/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3324FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3325{
3326 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3327 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3328}
3329
3330/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
3331FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3332{
3333 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3334 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3335}
3336/* Opcode 0xf3 0x0f 0x6a - invalid */
3337
3338
3339/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3340FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3341/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3342FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3343/* Opcode 0xf3 0x0f 0x6b - invalid */
3344
3345
3346/* Opcode 0x0f 0x6c - invalid */
3347
3348/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3349FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3350{
3351 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3352 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3353}
3354
3355/* Opcode 0xf3 0x0f 0x6c - invalid */
3356/* Opcode 0xf2 0x0f 0x6c - invalid */
3357
3358
3359/* Opcode 0x0f 0x6d - invalid */
3360
3361/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
3362FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3363{
3364 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, Wx");
3365 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3366}
3367
3368/* Opcode 0xf3 0x0f 0x6d - invalid */
3369
3370
3371FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3372{
3373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3374 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3375 {
3376 /**
3377 * @opcode 0x6e
3378 * @opcodesub rex.w=1
3379 * @oppfx none
3380 * @opcpuid mmx
3381 * @opgroup og_mmx_datamove
3382 * @opxcpttype 5
3383 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3384 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3385 */
3386 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3387 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3388 {
3389 /* MMX, greg64 */
3390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3391 IEM_MC_BEGIN(0, 1);
3392 IEM_MC_LOCAL(uint64_t, u64Tmp);
3393
3394 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3395 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3396
3397 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3398 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3399 IEM_MC_FPU_TO_MMX_MODE();
3400
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 else
3405 {
3406 /* MMX, [mem64] */
3407 IEM_MC_BEGIN(0, 2);
3408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3409 IEM_MC_LOCAL(uint64_t, u64Tmp);
3410
3411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3413 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3414 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3415
3416 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3417 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3418 IEM_MC_FPU_TO_MMX_MODE();
3419
3420 IEM_MC_ADVANCE_RIP();
3421 IEM_MC_END();
3422 }
3423 }
3424 else
3425 {
3426 /**
3427 * @opdone
3428 * @opcode 0x6e
3429 * @opcodesub rex.w=0
3430 * @oppfx none
3431 * @opcpuid mmx
3432 * @opgroup og_mmx_datamove
3433 * @opxcpttype 5
3434 * @opfunction iemOp_movd_q_Pd_Ey
3435 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3436 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3437 */
3438 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3439 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3440 {
3441 /* MMX, greg */
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3443 IEM_MC_BEGIN(0, 1);
3444 IEM_MC_LOCAL(uint64_t, u64Tmp);
3445
3446 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3448
3449 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3450 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3451 IEM_MC_FPU_TO_MMX_MODE();
3452
3453 IEM_MC_ADVANCE_RIP();
3454 IEM_MC_END();
3455 }
3456 else
3457 {
3458 /* MMX, [mem] */
3459 IEM_MC_BEGIN(0, 2);
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3461 IEM_MC_LOCAL(uint32_t, u32Tmp);
3462
3463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3465 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3467
3468 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3469 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3470 IEM_MC_FPU_TO_MMX_MODE();
3471
3472 IEM_MC_ADVANCE_RIP();
3473 IEM_MC_END();
3474 }
3475 }
3476 return VINF_SUCCESS;
3477}
3478
3479FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3480{
3481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3482 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3483 {
3484 /**
3485 * @opcode 0x6e
3486 * @opcodesub rex.w=1
3487 * @oppfx 0x66
3488 * @opcpuid sse2
3489 * @opgroup og_sse2_simdint_datamove
3490 * @opxcpttype 5
3491 * @optest 64-bit / op1=1 op2=2 -> op1=2
3492 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3493 */
3494 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3495 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3496 {
3497 /* XMM, greg64 */
3498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3499 IEM_MC_BEGIN(0, 1);
3500 IEM_MC_LOCAL(uint64_t, u64Tmp);
3501
3502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3503 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3504
3505 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3506 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3507
3508 IEM_MC_ADVANCE_RIP();
3509 IEM_MC_END();
3510 }
3511 else
3512 {
3513 /* XMM, [mem64] */
3514 IEM_MC_BEGIN(0, 2);
3515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3516 IEM_MC_LOCAL(uint64_t, u64Tmp);
3517
3518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3522
3523 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3524 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3525
3526 IEM_MC_ADVANCE_RIP();
3527 IEM_MC_END();
3528 }
3529 }
3530 else
3531 {
3532 /**
3533 * @opdone
3534 * @opcode 0x6e
3535 * @opcodesub rex.w=0
3536 * @oppfx 0x66
3537 * @opcpuid sse2
3538 * @opgroup og_sse2_simdint_datamove
3539 * @opxcpttype 5
3540 * @opfunction iemOp_movd_q_Vy_Ey
3541 * @optest op1=1 op2=2 -> op1=2
3542 * @optest op1=0 op2=-42 -> op1=-42
3543 */
3544 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3546 {
3547 /* XMM, greg32 */
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3549 IEM_MC_BEGIN(0, 1);
3550 IEM_MC_LOCAL(uint32_t, u32Tmp);
3551
3552 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3553 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3554
3555 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3556 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3557
3558 IEM_MC_ADVANCE_RIP();
3559 IEM_MC_END();
3560 }
3561 else
3562 {
3563 /* XMM, [mem32] */
3564 IEM_MC_BEGIN(0, 2);
3565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3566 IEM_MC_LOCAL(uint32_t, u32Tmp);
3567
3568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3570 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3572
3573 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3574 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3575
3576 IEM_MC_ADVANCE_RIP();
3577 IEM_MC_END();
3578 }
3579 }
3580 return VINF_SUCCESS;
3581}
3582
3583/* Opcode 0xf3 0x0f 0x6e - invalid */
3584
3585
3586/**
3587 * @opcode 0x6f
3588 * @oppfx none
3589 * @opcpuid mmx
3590 * @opgroup og_mmx_datamove
3591 * @opxcpttype 5
3592 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3593 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3594 */
3595FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3596{
3597 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3600 {
3601 /*
3602 * Register, register.
3603 */
3604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3605 IEM_MC_BEGIN(0, 1);
3606 IEM_MC_LOCAL(uint64_t, u64Tmp);
3607
3608 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3609 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3610
3611 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3612 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3613 IEM_MC_FPU_TO_MMX_MODE();
3614
3615 IEM_MC_ADVANCE_RIP();
3616 IEM_MC_END();
3617 }
3618 else
3619 {
3620 /*
3621 * Register, memory.
3622 */
3623 IEM_MC_BEGIN(0, 2);
3624 IEM_MC_LOCAL(uint64_t, u64Tmp);
3625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3626
3627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3630 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3631
3632 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3633 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3634 IEM_MC_FPU_TO_MMX_MODE();
3635
3636 IEM_MC_ADVANCE_RIP();
3637 IEM_MC_END();
3638 }
3639 return VINF_SUCCESS;
3640}
3641
3642/**
3643 * @opcode 0x6f
3644 * @oppfx 0x66
3645 * @opcpuid sse2
3646 * @opgroup og_sse2_simdint_datamove
3647 * @opxcpttype 1
3648 * @optest op1=1 op2=2 -> op1=2
3649 * @optest op1=0 op2=-42 -> op1=-42
3650 */
3651FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
3652{
3653 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3656 {
3657 /*
3658 * Register, register.
3659 */
3660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3661 IEM_MC_BEGIN(0, 0);
3662
3663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3665
3666 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3667 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3668 IEM_MC_ADVANCE_RIP();
3669 IEM_MC_END();
3670 }
3671 else
3672 {
3673 /*
3674 * Register, memory.
3675 */
3676 IEM_MC_BEGIN(0, 2);
3677 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3679
3680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3682 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3683 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3684
3685 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3686 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3687
3688 IEM_MC_ADVANCE_RIP();
3689 IEM_MC_END();
3690 }
3691 return VINF_SUCCESS;
3692}
3693
3694/**
3695 * @opcode 0x6f
3696 * @oppfx 0xf3
3697 * @opcpuid sse2
3698 * @opgroup og_sse2_simdint_datamove
3699 * @opxcpttype 4UA
3700 * @optest op1=1 op2=2 -> op1=2
3701 * @optest op1=0 op2=-42 -> op1=-42
3702 */
3703FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
3704{
3705 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3708 {
3709 /*
3710 * Register, register.
3711 */
3712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3713 IEM_MC_BEGIN(0, 0);
3714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3716 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3717 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 }
3721 else
3722 {
3723 /*
3724 * Register, memory.
3725 */
3726 IEM_MC_BEGIN(0, 2);
3727 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3729
3730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3732 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3733 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3734 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3735 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3736
3737 IEM_MC_ADVANCE_RIP();
3738 IEM_MC_END();
3739 }
3740 return VINF_SUCCESS;
3741}
3742
3743
3744/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3745FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3746{
3747 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3750 {
3751 /*
3752 * Register, register.
3753 */
3754 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756
3757 IEM_MC_BEGIN(3, 0);
3758 IEM_MC_ARG(uint64_t *, pDst, 0);
3759 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3760 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3761 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3762 IEM_MC_PREPARE_FPU_USAGE();
3763 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3764 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3765 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3766 IEM_MC_ADVANCE_RIP();
3767 IEM_MC_END();
3768 }
3769 else
3770 {
3771 /*
3772 * Register, memory.
3773 */
3774 IEM_MC_BEGIN(3, 2);
3775 IEM_MC_ARG(uint64_t *, pDst, 0);
3776 IEM_MC_LOCAL(uint64_t, uSrc);
3777 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3779
3780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3781 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3782 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3784 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3785
3786 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3787 IEM_MC_PREPARE_FPU_USAGE();
3788 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3789 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3790
3791 IEM_MC_ADVANCE_RIP();
3792 IEM_MC_END();
3793 }
3794 return VINF_SUCCESS;
3795}
3796
3797/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3798FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3799{
3800 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3803 {
3804 /*
3805 * Register, register.
3806 */
3807 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809
3810 IEM_MC_BEGIN(3, 0);
3811 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3812 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3813 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3815 IEM_MC_PREPARE_SSE_USAGE();
3816 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3817 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3818 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3819 IEM_MC_ADVANCE_RIP();
3820 IEM_MC_END();
3821 }
3822 else
3823 {
3824 /*
3825 * Register, memory.
3826 */
3827 IEM_MC_BEGIN(3, 2);
3828 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3829 IEM_MC_LOCAL(RTUINT128U, uSrc);
3830 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3832
3833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3834 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3835 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3837 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3838
3839 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3840 IEM_MC_PREPARE_SSE_USAGE();
3841 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3842 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3843
3844 IEM_MC_ADVANCE_RIP();
3845 IEM_MC_END();
3846 }
3847 return VINF_SUCCESS;
3848}
3849
3850/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3851FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3852{
3853 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3856 {
3857 /*
3858 * Register, register.
3859 */
3860 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862
3863 IEM_MC_BEGIN(3, 0);
3864 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3865 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3866 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3867 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3868 IEM_MC_PREPARE_SSE_USAGE();
3869 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3870 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3871 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3872 IEM_MC_ADVANCE_RIP();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 /*
3878 * Register, memory.
3879 */
3880 IEM_MC_BEGIN(3, 2);
3881 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3882 IEM_MC_LOCAL(RTUINT128U, uSrc);
3883 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3885
3886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3887 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3888 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3891
3892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3893 IEM_MC_PREPARE_SSE_USAGE();
3894 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3895 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3896
3897 IEM_MC_ADVANCE_RIP();
3898 IEM_MC_END();
3899 }
3900 return VINF_SUCCESS;
3901}
3902
3903/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3904FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3905{
3906 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3909 {
3910 /*
3911 * Register, register.
3912 */
3913 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3915
3916 IEM_MC_BEGIN(3, 0);
3917 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3918 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3919 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3920 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3921 IEM_MC_PREPARE_SSE_USAGE();
3922 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3923 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3924 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3925 IEM_MC_ADVANCE_RIP();
3926 IEM_MC_END();
3927 }
3928 else
3929 {
3930 /*
3931 * Register, memory.
3932 */
3933 IEM_MC_BEGIN(3, 2);
3934 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3935 IEM_MC_LOCAL(RTUINT128U, uSrc);
3936 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3938
3939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3940 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3941 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3943 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3944
3945 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3946 IEM_MC_PREPARE_SSE_USAGE();
3947 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3948 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3949
3950 IEM_MC_ADVANCE_RIP();
3951 IEM_MC_END();
3952 }
3953 return VINF_SUCCESS;
3954}
3955
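/*
 * Note on the four pshufxx memory forms above: the effective address is
 * calculated before the trailing immediate byte is fetched, so
 * IEM_MC_CALC_RM_EFF_ADDR is passed 1 as the outstanding immediate-byte
 * count to keep RIP-relative displacements correct in 64-bit mode.
 */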
3956
3957/** Opcode 0x0f 0x71 11/2. */
3958FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3959
3960/** Opcode 0x66 0x0f 0x71 11/2. */
3961FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3962
3963/** Opcode 0x0f 0x71 11/4. */
3964FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3965
3966/** Opcode 0x66 0x0f 0x71 11/4. */
3967FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3968
3969/** Opcode 0x0f 0x71 11/6. */
3970FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3971
3972/** Opcode 0x66 0x0f 0x71 11/6. */
3973FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3974
3975
3976/**
3977 * Group 12 jump table for register variant.
3978 */
3979IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3980{
3981 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3982 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3983 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3984 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3985 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3986 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3987 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3988 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3989};
3990AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3991
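/*
 * Each /r row in this and the following group tables holds four entries, one
 * per mandatory-prefix column in the order none, 0x66, 0xf3, 0xf2 (hence the
 * 8*4 assertion). Assuming pVCpu->iem.s.idxPrefix uses that same 0..3
 * encoding, the register dispatchers below select the handler as:
 *     g_apfnGroup12RegReg[((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + pVCpu->iem.s.idxPrefix]
 */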
3992
3993/** Opcode 0x0f 0x71. */
3994FNIEMOP_DEF(iemOp_Grp12)
3995{
3996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3998 /* register, register */
3999 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4000 + pVCpu->iem.s.idxPrefix], bRm);
4001 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4002}
4003
4004
4005/** Opcode 0x0f 0x72 11/2. */
4006FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4007
4008/** Opcode 0x66 0x0f 0x72 11/2. */
4009FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4010
4011/** Opcode 0x0f 0x72 11/4. */
4012FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4013
4014/** Opcode 0x66 0x0f 0x72 11/4. */
4015FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4016
4017/** Opcode 0x0f 0x72 11/6. */
4018FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4019
4020/** Opcode 0x66 0x0f 0x72 11/6. */
4021FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4022
4023
4024/**
4025 * Group 13 jump table for register variant.
4026 */
4027IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4028{
4029 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4030 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4031 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4032 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4033 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4034 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4035 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4036 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4037};
4038AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4039
4040/** Opcode 0x0f 0x72. */
4041FNIEMOP_DEF(iemOp_Grp13)
4042{
4043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4044 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4045 /* register, register */
4046 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4047 + pVCpu->iem.s.idxPrefix], bRm);
4048 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4049}
4050
4051
4052/** Opcode 0x0f 0x73 11/2. */
4053FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4054
4055/** Opcode 0x66 0x0f 0x73 11/2. */
4056FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4057
4058/** Opcode 0x66 0x0f 0x73 11/3. */
4059FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4060
4061/** Opcode 0x0f 0x73 11/6. */
4062FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4063
4064/** Opcode 0x66 0x0f 0x73 11/6. */
4065FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4066
4067/** Opcode 0x66 0x0f 0x73 11/7. */
4068FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4069
4070/**
4071 * Group 14 jump table for register variant.
4072 */
4073IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4074{
4075 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4076 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4077 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4078 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4079 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4080 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4081 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4082 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4083};
4084AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4085
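/*
 * Note that psrldq (/3) and pslldq (/7) only exist with the 0x66 prefix,
 * which is why only the second column of those two rows is populated.
 */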
4086
4087/** Opcode 0x0f 0x73. */
4088FNIEMOP_DEF(iemOp_Grp14)
4089{
4090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4092 /* register, register */
4093 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4094 + pVCpu->iem.s.idxPrefix], bRm);
4095 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4096}
4097
4098
4099/**
4100 * Common worker for MMX instructions of the form:
4101 * pxxx mm1, mm2/mem64
4102 */
4103FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4104{
4105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4107 {
4108 /*
4109 * Register, register.
4110 */
4111 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4112 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4114 IEM_MC_BEGIN(2, 0);
4115 IEM_MC_ARG(uint64_t *, pDst, 0);
4116 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4117 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4118 IEM_MC_PREPARE_FPU_USAGE();
4119 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4120 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4121 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4122 IEM_MC_ADVANCE_RIP();
4123 IEM_MC_END();
4124 }
4125 else
4126 {
4127 /*
4128 * Register, memory.
4129 */
4130 IEM_MC_BEGIN(2, 2);
4131 IEM_MC_ARG(uint64_t *, pDst, 0);
4132 IEM_MC_LOCAL(uint64_t, uSrc);
4133 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4135
4136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4138 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4139 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4140
4141 IEM_MC_PREPARE_FPU_USAGE();
4142 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4143 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4144
4145 IEM_MC_ADVANCE_RIP();
4146 IEM_MC_END();
4147 }
4148 return VINF_SUCCESS;
4149}
4150
4151
4152/**
4153 * Common worker for SSE2 instructions of the form:
4154 * pxxx xmm1, xmm2/mem128
4155 *
4156 * Proper alignment of the 128-bit operand is enforced.
4157 * Exceptions type 4. SSE2 cpuid checks.
4158 */
4159FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4160{
4161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4163 {
4164 /*
4165 * Register, register.
4166 */
4167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4168 IEM_MC_BEGIN(2, 0);
4169 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4170 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4172 IEM_MC_PREPARE_SSE_USAGE();
4173 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4174 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4175 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4176 IEM_MC_ADVANCE_RIP();
4177 IEM_MC_END();
4178 }
4179 else
4180 {
4181 /*
4182 * Register, memory.
4183 */
4184 IEM_MC_BEGIN(2, 2);
4185 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4186 IEM_MC_LOCAL(RTUINT128U, uSrc);
4187 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4189
4190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4193 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4194
4195 IEM_MC_PREPARE_SSE_USAGE();
4196 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4197 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4198
4199 IEM_MC_ADVANCE_RIP();
4200 IEM_MC_END();
4201 }
4202 return VINF_SUCCESS;
4203}
4204
4205
4206/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4207FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4208{
4209 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4210 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4211}
4212
4213/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4214FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4215{
4216 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4217 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4218}
4219
4220/* Opcode 0xf3 0x0f 0x74 - invalid */
4221/* Opcode 0xf2 0x0f 0x74 - invalid */
4222
4223
4224/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4225FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4226{
4227 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4228 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4229}
4230
4231/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4232FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4233{
4234 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4235 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4236}
4237
4238/* Opcode 0xf3 0x0f 0x75 - invalid */
4239/* Opcode 0xf2 0x0f 0x75 - invalid */
4240
4241
4242/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4243FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4244{
4245 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4246 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4247}
4248
4249/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4250FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4251{
4252 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4253 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4254}
4255
4256/* Opcode 0xf3 0x0f 0x76 - invalid */
4257/* Opcode 0xf2 0x0f 0x76 - invalid */
4258
4259
4260/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4261FNIEMOP_DEF(iemOp_emms)
4262{
4263 IEMOP_MNEMONIC(emms, "emms");
4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4265
4266 IEM_MC_BEGIN(0, 0);
4267 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
4268 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4269 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
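    /* Leaving MMX mode: EMMS marks every x87 tag as empty. */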
4270 IEM_MC_FPU_FROM_MMX_MODE();
4271 IEM_MC_ADVANCE_RIP();
4272 IEM_MC_END();
4273 return VINF_SUCCESS;
4274}
4275
4276/* Opcode 0x66 0x0f 0x77 - invalid */
4277/* Opcode 0xf3 0x0f 0x77 - invalid */
4278/* Opcode 0xf2 0x0f 0x77 - invalid */
4279
4280/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4281#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4282FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
4283{
4284 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
4285 IEMOP_HLP_IN_VMX_OPERATION();
4286 IEMOP_HLP_VMX_INSTR();
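    /* VMREAD ignores the operand-size prefix: the operand is 64-bit in
       64-bit mode and 32-bit everywhere else. */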
4287 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4288
4289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4290 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4291 {
4292 /*
4293 * Register, register.
4294 */
4295 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4296 if (enmEffOpSize == IEMMODE_64BIT)
4297 {
4298 IEM_MC_BEGIN(2, 0);
4299 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4300 IEM_MC_ARG(uint64_t, u64Enc, 1);
4301 IEM_MC_FETCH_GREG_U64(u64Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4302 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4303 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread64_reg, pu64Dst, u64Enc);
4304 IEM_MC_END();
4305 }
4306 else
4307 {
4308 IEM_MC_BEGIN(2, 0);
4309 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4310 IEM_MC_ARG(uint32_t, u32Enc, 1);
4311 IEM_MC_FETCH_GREG_U32(u32Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4312 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4313 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread32_reg, pu32Dst, u32Enc);
4314 IEM_MC_END();
4315 }
4316 }
4317 else
4318 {
4319 /*
4320 * Register, memory.
4321 */
4322 if (enmEffOpSize == IEMMODE_64BIT)
4323 {
4324 IEM_MC_BEGIN(4, 0);
4325 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4326 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4327 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4328 IEM_MC_ARG(uint64_t, u64Enc, 3);
4329 IEM_MC_FETCH_GREG_U64(u64Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4331 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4332 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4333 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4334 IEM_MC_END();
4335 }
4336 else
4337 {
4338 IEM_MC_BEGIN(4, 0);
4339 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4340 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4341 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4342 IEM_MC_ARG(uint32_t, u32Enc, 3);
4343 IEM_MC_FETCH_GREG_U32(u32Enc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4345 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4346 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4347 IEM_MC_CALL_CIMPL_4(iemCImpl_vmread_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4348 IEM_MC_END();
4349 }
4350 }
4351 return VINF_SUCCESS;
4352}
4353#else
4354FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4355#endif
4356
4357/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4358FNIEMOP_STUB(iemOp_AmdGrp17);
4359/* Opcode 0xf3 0x0f 0x78 - invalid */
4360/* Opcode 0xf2 0x0f 0x78 - invalid */
4361
4362/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4363#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4364FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
4365{
4366 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
4367 IEMOP_HLP_IN_VMX_OPERATION();
4368 IEMOP_HLP_VMX_INSTR();
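    /* Like VMREAD, VMWRITE uses a fixed operand size: 64-bit in 64-bit mode,
       32-bit everywhere else. */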
4369 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
4370
4371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4373 {
4374 /*
4375 * Register, register.
4376 */
4377 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4378 if (enmEffOpSize == IEMMODE_64BIT)
4379 {
4380 IEM_MC_BEGIN(2, 0);
4381 IEM_MC_ARG(uint64_t, u64Val, 0);
4382 IEM_MC_ARG(uint64_t, u64Enc, 1);
4383 IEM_MC_FETCH_GREG_U64(u64Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4384 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4385 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
4386 IEM_MC_END();
4387 }
4388 else
4389 {
4390 IEM_MC_BEGIN(2, 0);
4391 IEM_MC_ARG(uint32_t, u32Val, 0);
4392 IEM_MC_ARG(uint32_t, u32Enc, 1);
4393 IEM_MC_FETCH_GREG_U32(u32Val, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4394 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4395 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
4396 IEM_MC_END();
4397 }
4398 }
4399 else
4400 {
4401 /*
4402 * Register, memory.
4403 */
4404 if (enmEffOpSize == IEMMODE_64BIT)
4405 {
4406 IEM_MC_BEGIN(4, 0);
4407 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4408 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4409 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4410 IEM_MC_ARG(uint64_t, u64Enc, 3);
4411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4412 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4413 IEM_MC_FETCH_GREG_U64(u64Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4414 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4415 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u64Enc);
4416 IEM_MC_END();
4417 }
4418 else
4419 {
4420 IEM_MC_BEGIN(4, 0);
4421 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4422 IEM_MC_ARG_CONST(IEMMODE, enmEffAddrMode,/*=*/pVCpu->iem.s.enmEffAddrMode, 1);
4423 IEM_MC_ARG(RTGCPTR, GCPtrVal, 2);
4424 IEM_MC_ARG(uint32_t, u32Enc, 3);
4425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
4426 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
4427 IEM_MC_FETCH_GREG_U32(u32Enc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4428 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4429 IEM_MC_CALL_CIMPL_4(iemCImpl_vmwrite_mem, iEffSeg, enmEffAddrMode, GCPtrVal, u32Enc);
4430 IEM_MC_END();
4431 }
4432 }
4433 return VINF_SUCCESS;
4434}
4435#else
4436FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4437#endif
4438/* Opcode 0x66 0x0f 0x79 - invalid */
4439/* Opcode 0xf3 0x0f 0x79 - invalid */
4440/* Opcode 0xf2 0x0f 0x79 - invalid */
4441
4442/* Opcode 0x0f 0x7a - invalid */
4443/* Opcode 0x66 0x0f 0x7a - invalid */
4444/* Opcode 0xf3 0x0f 0x7a - invalid */
4445/* Opcode 0xf2 0x0f 0x7a - invalid */
4446
4447/* Opcode 0x0f 0x7b - invalid */
4448/* Opcode 0x66 0x0f 0x7b - invalid */
4449/* Opcode 0xf3 0x0f 0x7b - invalid */
4450/* Opcode 0xf2 0x0f 0x7b - invalid */
4451
4452/* Opcode 0x0f 0x7c - invalid */
4453/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4454FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4455/* Opcode 0xf3 0x0f 0x7c - invalid */
4456/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4457FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4458
4459/* Opcode 0x0f 0x7d - invalid */
4460/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4461FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4462/* Opcode 0xf3 0x0f 0x7d - invalid */
4463/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4464FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4465
4466
4467/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4468FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4469{
4470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4471 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4472 {
4473 /**
4474 * @opcode 0x7e
4475 * @opcodesub rex.w=1
4476 * @oppfx none
4477 * @opcpuid mmx
4478 * @opgroup og_mmx_datamove
4479 * @opxcpttype 5
4480 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4481 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4482 */
4483 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4484 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4485 {
4486 /* greg64, MMX */
4487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4488 IEM_MC_BEGIN(0, 1);
4489 IEM_MC_LOCAL(uint64_t, u64Tmp);
4490
4491 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4492 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4493
4494 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4495 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4496 IEM_MC_FPU_TO_MMX_MODE();
4497
4498 IEM_MC_ADVANCE_RIP();
4499 IEM_MC_END();
4500 }
4501 else
4502 {
4503 /* [mem64], MMX */
4504 IEM_MC_BEGIN(0, 2);
4505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4506 IEM_MC_LOCAL(uint64_t, u64Tmp);
4507
4508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4510 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4511 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4512
4513 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4514 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4515 IEM_MC_FPU_TO_MMX_MODE();
4516
4517 IEM_MC_ADVANCE_RIP();
4518 IEM_MC_END();
4519 }
4520 }
4521 else
4522 {
4523 /**
4524 * @opdone
4525 * @opcode 0x7e
4526 * @opcodesub rex.w=0
4527 * @oppfx none
4528 * @opcpuid mmx
4529 * @opgroup og_mmx_datamove
4530 * @opxcpttype 5
4531 * @opfunction iemOp_movd_q_Ey_Pd
4532 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4533 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4534 */
4535 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4536 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4537 {
4538 /* greg32, MMX */
4539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4540 IEM_MC_BEGIN(0, 1);
4541 IEM_MC_LOCAL(uint32_t, u32Tmp);
4542
4543 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4544 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4545
4546 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4547 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4548 IEM_MC_FPU_TO_MMX_MODE();
4549
4550 IEM_MC_ADVANCE_RIP();
4551 IEM_MC_END();
4552 }
4553 else
4554 {
4555 /* [mem32], MMX */
4556 IEM_MC_BEGIN(0, 2);
4557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4558 IEM_MC_LOCAL(uint32_t, u32Tmp);
4559
4560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4562 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4563 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4564
4565 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4566 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4567 IEM_MC_FPU_TO_MMX_MODE();
4568
4569 IEM_MC_ADVANCE_RIP();
4570 IEM_MC_END();
4571 }
4572 }
4573 return VINF_SUCCESS;
4574
4575}
4576
4577
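/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */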
4578FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4579{
4580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4581 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4582 {
4583 /**
4584 * @opcode 0x7e
4585 * @opcodesub rex.w=1
4586 * @oppfx 0x66
4587 * @opcpuid sse2
4588 * @opgroup og_sse2_simdint_datamove
4589 * @opxcpttype 5
4590 * @optest 64-bit / op1=1 op2=2 -> op1=2
4591 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4592 */
4593 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4595 {
4596 /* greg64, XMM */
4597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4598 IEM_MC_BEGIN(0, 1);
4599 IEM_MC_LOCAL(uint64_t, u64Tmp);
4600
4601 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4602 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4603
4604 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4605 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4606
4607 IEM_MC_ADVANCE_RIP();
4608 IEM_MC_END();
4609 }
4610 else
4611 {
4612 /* [mem64], XMM */
4613 IEM_MC_BEGIN(0, 2);
4614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4615 IEM_MC_LOCAL(uint64_t, u64Tmp);
4616
4617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4619 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4620 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4621
4622 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4623 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4624
4625 IEM_MC_ADVANCE_RIP();
4626 IEM_MC_END();
4627 }
4628 }
4629 else
4630 {
4631 /**
4632 * @opdone
4633 * @opcode 0x7e
4634 * @opcodesub rex.w=0
4635 * @oppfx 0x66
4636 * @opcpuid sse2
4637 * @opgroup og_sse2_simdint_datamove
4638 * @opxcpttype 5
4639 * @opfunction iemOp_movd_q_Ey_Vy
4640 * @optest op1=1 op2=2 -> op1=2
4641 * @optest op1=0 op2=-42 -> op1=-42
4642 */
4643 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
4644 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4645 {
4646 /* greg32, XMM */
4647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4648 IEM_MC_BEGIN(0, 1);
4649 IEM_MC_LOCAL(uint32_t, u32Tmp);
4650
4651 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4653
4654 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4655 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4656
4657 IEM_MC_ADVANCE_RIP();
4658 IEM_MC_END();
4659 }
4660 else
4661 {
4662 /* [mem32], XMM */
4663 IEM_MC_BEGIN(0, 2);
4664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4665 IEM_MC_LOCAL(uint32_t, u32Tmp);
4666
4667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4669 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4670 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4671
4672 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4673 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4674
4675 IEM_MC_ADVANCE_RIP();
4676 IEM_MC_END();
4677 }
4678 }
4679 return VINF_SUCCESS;
4680
4681}
4682
4683/**
4684 * @opcode 0x7e
4685 * @oppfx 0xf3
4686 * @opcpuid sse2
4687 * @opgroup og_sse2_pcksclr_datamove
4688 * @opxcpttype none
4689 * @optest op1=1 op2=2 -> op1=2
4690 * @optest op1=0 op2=-42 -> op1=-42
4691 */
4692FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4693{
4694 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4696 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4697 {
4698 /*
4699 * Register, register.
4700 */
4701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4702 IEM_MC_BEGIN(0, 2);
4703 IEM_MC_LOCAL(uint64_t, uSrc);
4704
4705 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4706 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4707
4708 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4709 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4710
4711 IEM_MC_ADVANCE_RIP();
4712 IEM_MC_END();
4713 }
4714 else
4715 {
4716 /*
4717 * Register, memory.
4718 */
4719 IEM_MC_BEGIN(0, 2);
4720 IEM_MC_LOCAL(uint64_t, uSrc);
4721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4722
4723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4725 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4726 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4727
4728 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4729 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4730
4731 IEM_MC_ADVANCE_RIP();
4732 IEM_MC_END();
4733 }
4734 return VINF_SUCCESS;
4735}
4736
4737/* Opcode 0xf2 0x0f 0x7e - invalid */
4738
4739
4740/** Opcode 0x0f 0x7f - movq Qq, Pq */
4741FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4742{
4743 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4745 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4746 {
4747 /*
4748 * Register, register.
4749 */
4750 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4751 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4753 IEM_MC_BEGIN(0, 1);
4754 IEM_MC_LOCAL(uint64_t, u64Tmp);
4755 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4756 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4757 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4758 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4759 IEM_MC_ADVANCE_RIP();
4760 IEM_MC_END();
4761 }
4762 else
4763 {
4764 /*
4765 * Register, memory.
4766 */
4767 IEM_MC_BEGIN(0, 2);
4768 IEM_MC_LOCAL(uint64_t, u64Tmp);
4769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4770
4771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4773 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4774 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4775
4776 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4777 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4778
4779 IEM_MC_ADVANCE_RIP();
4780 IEM_MC_END();
4781 }
4782 return VINF_SUCCESS;
4783}
4784
4785/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4786FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4787{
4788 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4790 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4791 {
4792 /*
4793 * Register, register.
4794 */
4795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4796 IEM_MC_BEGIN(0, 0);
4797 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4798 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4799 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4800 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4801 IEM_MC_ADVANCE_RIP();
4802 IEM_MC_END();
4803 }
4804 else
4805 {
4806 /*
4807 * Register, memory.
4808 */
4809 IEM_MC_BEGIN(0, 2);
4810 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4812
4813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4816 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4817
4818 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4819 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4820
4821 IEM_MC_ADVANCE_RIP();
4822 IEM_MC_END();
4823 }
4824 return VINF_SUCCESS;
4825}
4826
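/* Unlike the movdqu variant below, the movdqa store above goes through the
   aligned SSE helper, so a misaligned 16-byte memory operand raises #GP(0). */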
4827/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4828FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4829{
4830 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4831 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4832 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4833 {
4834 /*
4835 * Register, register.
4836 */
4837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4838 IEM_MC_BEGIN(0, 0);
4839 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4840 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4841 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4842 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4843 IEM_MC_ADVANCE_RIP();
4844 IEM_MC_END();
4845 }
4846 else
4847 {
4848 /*
4849 * Register, memory.
4850 */
4851 IEM_MC_BEGIN(0, 2);
4852 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4854
4855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4857 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4858 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4859
4860 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4861 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4862
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 }
4866 return VINF_SUCCESS;
4867}
4868
4869/* Opcode 0xf2 0x0f 0x7f - invalid */
4870
4871
4872
4873/** Opcode 0x0f 0x80. */
4874FNIEMOP_DEF(iemOp_jo_Jv)
4875{
4876 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4877 IEMOP_HLP_MIN_386();
4878 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4879 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4880 {
4881 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4883
4884 IEM_MC_BEGIN(0, 0);
4885 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4886 IEM_MC_REL_JMP_S16(i16Imm);
4887 } IEM_MC_ELSE() {
4888 IEM_MC_ADVANCE_RIP();
4889 } IEM_MC_ENDIF();
4890 IEM_MC_END();
4891 }
4892 else
4893 {
4894 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4896
4897 IEM_MC_BEGIN(0, 0);
4898 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4899 IEM_MC_REL_JMP_S32(i32Imm);
4900 } IEM_MC_ELSE() {
4901 IEM_MC_ADVANCE_RIP();
4902 } IEM_MC_ENDIF();
4903 IEM_MC_END();
4904 }
4905 return VINF_SUCCESS;
4906}
4907
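/*
 * The Jcc handlers below (0x81 through 0x8f) all follow the jo pattern above:
 * a signed 16-bit displacement when the effective operand size is 16-bit,
 * otherwise a signed 32-bit displacement, applied to the full 64-bit RIP in
 * long mode where IEMOP_HLP_DEFAULT_64BIT_OP_SIZE makes 64-bit the default
 * operand size.
 */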
4908
4909/** Opcode 0x0f 0x81. */
4910FNIEMOP_DEF(iemOp_jno_Jv)
4911{
4912 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4913 IEMOP_HLP_MIN_386();
4914 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4915 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4916 {
4917 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4919
4920 IEM_MC_BEGIN(0, 0);
4921 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4922 IEM_MC_ADVANCE_RIP();
4923 } IEM_MC_ELSE() {
4924 IEM_MC_REL_JMP_S16(i16Imm);
4925 } IEM_MC_ENDIF();
4926 IEM_MC_END();
4927 }
4928 else
4929 {
4930 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4932
4933 IEM_MC_BEGIN(0, 0);
4934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4935 IEM_MC_ADVANCE_RIP();
4936 } IEM_MC_ELSE() {
4937 IEM_MC_REL_JMP_S32(i32Imm);
4938 } IEM_MC_ENDIF();
4939 IEM_MC_END();
4940 }
4941 return VINF_SUCCESS;
4942}
4943
4944
4945/** Opcode 0x0f 0x82. */
4946FNIEMOP_DEF(iemOp_jc_Jv)
4947{
4948 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4949 IEMOP_HLP_MIN_386();
4950 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4951 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4952 {
4953 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4955
4956 IEM_MC_BEGIN(0, 0);
4957 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4958 IEM_MC_REL_JMP_S16(i16Imm);
4959 } IEM_MC_ELSE() {
4960 IEM_MC_ADVANCE_RIP();
4961 } IEM_MC_ENDIF();
4962 IEM_MC_END();
4963 }
4964 else
4965 {
4966 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4968
4969 IEM_MC_BEGIN(0, 0);
4970 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4971 IEM_MC_REL_JMP_S32(i32Imm);
4972 } IEM_MC_ELSE() {
4973 IEM_MC_ADVANCE_RIP();
4974 } IEM_MC_ENDIF();
4975 IEM_MC_END();
4976 }
4977 return VINF_SUCCESS;
4978}
4979
4980
4981/** Opcode 0x0f 0x83. */
4982FNIEMOP_DEF(iemOp_jnc_Jv)
4983{
4984 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4985 IEMOP_HLP_MIN_386();
4986 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4987 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4988 {
4989 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4991
4992 IEM_MC_BEGIN(0, 0);
4993 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4994 IEM_MC_ADVANCE_RIP();
4995 } IEM_MC_ELSE() {
4996 IEM_MC_REL_JMP_S16(i16Imm);
4997 } IEM_MC_ENDIF();
4998 IEM_MC_END();
4999 }
5000 else
5001 {
5002 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5004
5005 IEM_MC_BEGIN(0, 0);
5006 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5007 IEM_MC_ADVANCE_RIP();
5008 } IEM_MC_ELSE() {
5009 IEM_MC_REL_JMP_S32(i32Imm);
5010 } IEM_MC_ENDIF();
5011 IEM_MC_END();
5012 }
5013 return VINF_SUCCESS;
5014}
5015
5016
5017/** Opcode 0x0f 0x84. */
5018FNIEMOP_DEF(iemOp_je_Jv)
5019{
5020 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5021 IEMOP_HLP_MIN_386();
5022 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5023 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5024 {
5025 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5027
5028 IEM_MC_BEGIN(0, 0);
5029 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5030 IEM_MC_REL_JMP_S16(i16Imm);
5031 } IEM_MC_ELSE() {
5032 IEM_MC_ADVANCE_RIP();
5033 } IEM_MC_ENDIF();
5034 IEM_MC_END();
5035 }
5036 else
5037 {
5038 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5040
5041 IEM_MC_BEGIN(0, 0);
5042 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5043 IEM_MC_REL_JMP_S32(i32Imm);
5044 } IEM_MC_ELSE() {
5045 IEM_MC_ADVANCE_RIP();
5046 } IEM_MC_ENDIF();
5047 IEM_MC_END();
5048 }
5049 return VINF_SUCCESS;
5050}
5051
5052
5053/** Opcode 0x0f 0x85. */
5054FNIEMOP_DEF(iemOp_jne_Jv)
5055{
5056 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
5057 IEMOP_HLP_MIN_386();
5058 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5059 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5060 {
5061 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5063
5064 IEM_MC_BEGIN(0, 0);
5065 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5066 IEM_MC_ADVANCE_RIP();
5067 } IEM_MC_ELSE() {
5068 IEM_MC_REL_JMP_S16(i16Imm);
5069 } IEM_MC_ENDIF();
5070 IEM_MC_END();
5071 }
5072 else
5073 {
5074 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5076
5077 IEM_MC_BEGIN(0, 0);
5078 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5079 IEM_MC_ADVANCE_RIP();
5080 } IEM_MC_ELSE() {
5081 IEM_MC_REL_JMP_S32(i32Imm);
5082 } IEM_MC_ENDIF();
5083 IEM_MC_END();
5084 }
5085 return VINF_SUCCESS;
5086}
5087
5088
5089/** Opcode 0x0f 0x86. */
5090FNIEMOP_DEF(iemOp_jbe_Jv)
5091{
5092 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
5093 IEMOP_HLP_MIN_386();
5094 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5095 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5096 {
5097 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5099
5100 IEM_MC_BEGIN(0, 0);
5101 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5102 IEM_MC_REL_JMP_S16(i16Imm);
5103 } IEM_MC_ELSE() {
5104 IEM_MC_ADVANCE_RIP();
5105 } IEM_MC_ENDIF();
5106 IEM_MC_END();
5107 }
5108 else
5109 {
5110 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5112
5113 IEM_MC_BEGIN(0, 0);
5114 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5115 IEM_MC_REL_JMP_S32(i32Imm);
5116 } IEM_MC_ELSE() {
5117 IEM_MC_ADVANCE_RIP();
5118 } IEM_MC_ENDIF();
5119 IEM_MC_END();
5120 }
5121 return VINF_SUCCESS;
5122}
5123
5124
5125/** Opcode 0x0f 0x87. */
5126FNIEMOP_DEF(iemOp_jnbe_Jv)
5127{
5128 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
5129 IEMOP_HLP_MIN_386();
5130 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5131 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5132 {
5133 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5135
5136 IEM_MC_BEGIN(0, 0);
5137 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5138 IEM_MC_ADVANCE_RIP();
5139 } IEM_MC_ELSE() {
5140 IEM_MC_REL_JMP_S16(i16Imm);
5141 } IEM_MC_ENDIF();
5142 IEM_MC_END();
5143 }
5144 else
5145 {
5146 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5148
5149 IEM_MC_BEGIN(0, 0);
5150 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5151 IEM_MC_ADVANCE_RIP();
5152 } IEM_MC_ELSE() {
5153 IEM_MC_REL_JMP_S32(i32Imm);
5154 } IEM_MC_ENDIF();
5155 IEM_MC_END();
5156 }
5157 return VINF_SUCCESS;
5158}
5159
5160
5161/** Opcode 0x0f 0x88. */
5162FNIEMOP_DEF(iemOp_js_Jv)
5163{
5164 IEMOP_MNEMONIC(js_Jv, "js Jv");
5165 IEMOP_HLP_MIN_386();
5166 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5167 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5168 {
5169 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5171
5172 IEM_MC_BEGIN(0, 0);
5173 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5174 IEM_MC_REL_JMP_S16(i16Imm);
5175 } IEM_MC_ELSE() {
5176 IEM_MC_ADVANCE_RIP();
5177 } IEM_MC_ENDIF();
5178 IEM_MC_END();
5179 }
5180 else
5181 {
5182 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5184
5185 IEM_MC_BEGIN(0, 0);
5186 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5187 IEM_MC_REL_JMP_S32(i32Imm);
5188 } IEM_MC_ELSE() {
5189 IEM_MC_ADVANCE_RIP();
5190 } IEM_MC_ENDIF();
5191 IEM_MC_END();
5192 }
5193 return VINF_SUCCESS;
5194}
5195
5196
5197/** Opcode 0x0f 0x89. */
5198FNIEMOP_DEF(iemOp_jns_Jv)
5199{
5200 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5201 IEMOP_HLP_MIN_386();
5202 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5203 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5204 {
5205 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5207
5208 IEM_MC_BEGIN(0, 0);
5209 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5210 IEM_MC_ADVANCE_RIP();
5211 } IEM_MC_ELSE() {
5212 IEM_MC_REL_JMP_S16(i16Imm);
5213 } IEM_MC_ENDIF();
5214 IEM_MC_END();
5215 }
5216 else
5217 {
5218 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5220
5221 IEM_MC_BEGIN(0, 0);
5222 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5223 IEM_MC_ADVANCE_RIP();
5224 } IEM_MC_ELSE() {
5225 IEM_MC_REL_JMP_S32(i32Imm);
5226 } IEM_MC_ENDIF();
5227 IEM_MC_END();
5228 }
5229 return VINF_SUCCESS;
5230}
5231
5232
5233/** Opcode 0x0f 0x8a. */
5234FNIEMOP_DEF(iemOp_jp_Jv)
5235{
5236 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5237 IEMOP_HLP_MIN_386();
5238 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5239 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5240 {
5241 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5243
5244 IEM_MC_BEGIN(0, 0);
5245 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5246 IEM_MC_REL_JMP_S16(i16Imm);
5247 } IEM_MC_ELSE() {
5248 IEM_MC_ADVANCE_RIP();
5249 } IEM_MC_ENDIF();
5250 IEM_MC_END();
5251 }
5252 else
5253 {
5254 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5256
5257 IEM_MC_BEGIN(0, 0);
5258 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5259 IEM_MC_REL_JMP_S32(i32Imm);
5260 } IEM_MC_ELSE() {
5261 IEM_MC_ADVANCE_RIP();
5262 } IEM_MC_ENDIF();
5263 IEM_MC_END();
5264 }
5265 return VINF_SUCCESS;
5266}
5267
5268
5269/** Opcode 0x0f 0x8b. */
5270FNIEMOP_DEF(iemOp_jnp_Jv)
5271{
5272 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5273 IEMOP_HLP_MIN_386();
5274 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5275 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5276 {
5277 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5279
5280 IEM_MC_BEGIN(0, 0);
5281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5282 IEM_MC_ADVANCE_RIP();
5283 } IEM_MC_ELSE() {
5284 IEM_MC_REL_JMP_S16(i16Imm);
5285 } IEM_MC_ENDIF();
5286 IEM_MC_END();
5287 }
5288 else
5289 {
5290 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5292
5293 IEM_MC_BEGIN(0, 0);
5294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5295 IEM_MC_ADVANCE_RIP();
5296 } IEM_MC_ELSE() {
5297 IEM_MC_REL_JMP_S32(i32Imm);
5298 } IEM_MC_ENDIF();
5299 IEM_MC_END();
5300 }
5301 return VINF_SUCCESS;
5302}
5303
5304
5305/** Opcode 0x0f 0x8c. */
5306FNIEMOP_DEF(iemOp_jl_Jv)
5307{
5308 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5309 IEMOP_HLP_MIN_386();
5310 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5311 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5312 {
5313 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5315
5316 IEM_MC_BEGIN(0, 0);
5317 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5318 IEM_MC_REL_JMP_S16(i16Imm);
5319 } IEM_MC_ELSE() {
5320 IEM_MC_ADVANCE_RIP();
5321 } IEM_MC_ENDIF();
5322 IEM_MC_END();
5323 }
5324 else
5325 {
5326 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5328
5329 IEM_MC_BEGIN(0, 0);
5330 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5331 IEM_MC_REL_JMP_S32(i32Imm);
5332 } IEM_MC_ELSE() {
5333 IEM_MC_ADVANCE_RIP();
5334 } IEM_MC_ENDIF();
5335 IEM_MC_END();
5336 }
5337 return VINF_SUCCESS;
5338}
5339
5340
5341/** Opcode 0x0f 0x8d. */
5342FNIEMOP_DEF(iemOp_jnl_Jv)
5343{
5344 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5345 IEMOP_HLP_MIN_386();
5346 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5347 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5348 {
5349 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5351
5352 IEM_MC_BEGIN(0, 0);
5353 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5354 IEM_MC_ADVANCE_RIP();
5355 } IEM_MC_ELSE() {
5356 IEM_MC_REL_JMP_S16(i16Imm);
5357 } IEM_MC_ENDIF();
5358 IEM_MC_END();
5359 }
5360 else
5361 {
5362 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5364
5365 IEM_MC_BEGIN(0, 0);
5366 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5367 IEM_MC_ADVANCE_RIP();
5368 } IEM_MC_ELSE() {
5369 IEM_MC_REL_JMP_S32(i32Imm);
5370 } IEM_MC_ENDIF();
5371 IEM_MC_END();
5372 }
5373 return VINF_SUCCESS;
5374}
5375
5376
5377/** Opcode 0x0f 0x8e. */
5378FNIEMOP_DEF(iemOp_jle_Jv)
5379{
5380 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5381 IEMOP_HLP_MIN_386();
5382 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5383 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5384 {
5385 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5387
5388 IEM_MC_BEGIN(0, 0);
5389 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5390 IEM_MC_REL_JMP_S16(i16Imm);
5391 } IEM_MC_ELSE() {
5392 IEM_MC_ADVANCE_RIP();
5393 } IEM_MC_ENDIF();
5394 IEM_MC_END();
5395 }
5396 else
5397 {
5398 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5400
5401 IEM_MC_BEGIN(0, 0);
5402 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5403 IEM_MC_REL_JMP_S32(i32Imm);
5404 } IEM_MC_ELSE() {
5405 IEM_MC_ADVANCE_RIP();
5406 } IEM_MC_ENDIF();
5407 IEM_MC_END();
5408 }
5409 return VINF_SUCCESS;
5410}
5411
5412
5413/** Opcode 0x0f 0x8f. */
5414FNIEMOP_DEF(iemOp_jnle_Jv)
5415{
5416 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5417 IEMOP_HLP_MIN_386();
5418 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5419 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5420 {
5421 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5423
5424 IEM_MC_BEGIN(0, 0);
5425 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5426 IEM_MC_ADVANCE_RIP();
5427 } IEM_MC_ELSE() {
5428 IEM_MC_REL_JMP_S16(i16Imm);
5429 } IEM_MC_ENDIF();
5430 IEM_MC_END();
5431 }
5432 else
5433 {
5434 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5436
5437 IEM_MC_BEGIN(0, 0);
5438 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5439 IEM_MC_ADVANCE_RIP();
5440 } IEM_MC_ELSE() {
5441 IEM_MC_REL_JMP_S32(i32Imm);
5442 } IEM_MC_ENDIF();
5443 IEM_MC_END();
5444 }
5445 return VINF_SUCCESS;
5446}
5447
5448
5449/** Opcode 0x0f 0x90. */
5450FNIEMOP_DEF(iemOp_seto_Eb)
5451{
5452 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5453 IEMOP_HLP_MIN_386();
5454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5455
5456 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5457 * any way. AMD says it's "unused", whatever that means. We're
5458 * ignoring it for now. */
5459 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5460 {
5461 /* register target */
5462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5463 IEM_MC_BEGIN(0, 0);
5464 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5465 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5466 } IEM_MC_ELSE() {
5467 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5468 } IEM_MC_ENDIF();
5469 IEM_MC_ADVANCE_RIP();
5470 IEM_MC_END();
5471 }
5472 else
5473 {
5474 /* memory target */
5475 IEM_MC_BEGIN(0, 1);
5476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5479 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5480 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5481 } IEM_MC_ELSE() {
5482 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5483 } IEM_MC_ENDIF();
5484 IEM_MC_ADVANCE_RIP();
5485 IEM_MC_END();
5486 }
5487 return VINF_SUCCESS;
5488}
5489
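/*
 * The SETcc handlers (0x90 through 0x9f) share the seto pattern above: the
 * byte destination is written with 1 when the condition holds and 0 when it
 * does not, regardless of the ModR/M reg field (see the @todo notes).
 */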
5490
5491/** Opcode 0x0f 0x91. */
5492FNIEMOP_DEF(iemOp_setno_Eb)
5493{
5494 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5495 IEMOP_HLP_MIN_386();
5496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5497
5498 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5499 * any way. AMD says it's "unused", whatever that means. We're
5500 * ignoring it for now. */
5501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5502 {
5503 /* register target */
5504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5505 IEM_MC_BEGIN(0, 0);
5506 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5507 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5508 } IEM_MC_ELSE() {
5509 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5510 } IEM_MC_ENDIF();
5511 IEM_MC_ADVANCE_RIP();
5512 IEM_MC_END();
5513 }
5514 else
5515 {
5516 /* memory target */
5517 IEM_MC_BEGIN(0, 1);
5518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5521 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5522 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5523 } IEM_MC_ELSE() {
5524 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5525 } IEM_MC_ENDIF();
5526 IEM_MC_ADVANCE_RIP();
5527 IEM_MC_END();
5528 }
5529 return VINF_SUCCESS;
5530}
5531
5532
5533/** Opcode 0x0f 0x92. */
5534FNIEMOP_DEF(iemOp_setc_Eb)
5535{
5536 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5537 IEMOP_HLP_MIN_386();
5538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5539
5540 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5541 * any way. AMD says it's "unused", whatever that means. We're
5542     *        ignoring it for now. */
5543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5544 {
5545 /* register target */
5546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5547 IEM_MC_BEGIN(0, 0);
5548 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5549 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5550 } IEM_MC_ELSE() {
5551 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5552 } IEM_MC_ENDIF();
5553 IEM_MC_ADVANCE_RIP();
5554 IEM_MC_END();
5555 }
5556 else
5557 {
5558 /* memory target */
5559 IEM_MC_BEGIN(0, 1);
5560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5563 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5565 } IEM_MC_ELSE() {
5566 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5567 } IEM_MC_ENDIF();
5568 IEM_MC_ADVANCE_RIP();
5569 IEM_MC_END();
5570 }
5571 return VINF_SUCCESS;
5572}
5573
5574
5575/** Opcode 0x0f 0x93. */
5576FNIEMOP_DEF(iemOp_setnc_Eb)
5577{
5578 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5579 IEMOP_HLP_MIN_386();
5580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5581
5582 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5583 * any way. AMD says it's "unused", whatever that means. We're
5584     *        ignoring it for now. */
5585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5586 {
5587 /* register target */
5588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5589 IEM_MC_BEGIN(0, 0);
5590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5591 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5592 } IEM_MC_ELSE() {
5593 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5594 } IEM_MC_ENDIF();
5595 IEM_MC_ADVANCE_RIP();
5596 IEM_MC_END();
5597 }
5598 else
5599 {
5600 /* memory target */
5601 IEM_MC_BEGIN(0, 1);
5602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5605 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5607 } IEM_MC_ELSE() {
5608 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5609 } IEM_MC_ENDIF();
5610 IEM_MC_ADVANCE_RIP();
5611 IEM_MC_END();
5612 }
5613 return VINF_SUCCESS;
5614}
5615
5616
5617/** Opcode 0x0f 0x94. */
5618FNIEMOP_DEF(iemOp_sete_Eb)
5619{
5620 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5621 IEMOP_HLP_MIN_386();
5622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5623
5624 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5625 * any way. AMD says it's "unused", whatever that means. We're
5626     *        ignoring it for now. */
5627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5628 {
5629 /* register target */
5630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5631 IEM_MC_BEGIN(0, 0);
5632 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5633 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5634 } IEM_MC_ELSE() {
5635 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5636 } IEM_MC_ENDIF();
5637 IEM_MC_ADVANCE_RIP();
5638 IEM_MC_END();
5639 }
5640 else
5641 {
5642 /* memory target */
5643 IEM_MC_BEGIN(0, 1);
5644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5649 } IEM_MC_ELSE() {
5650 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5651 } IEM_MC_ENDIF();
5652 IEM_MC_ADVANCE_RIP();
5653 IEM_MC_END();
5654 }
5655 return VINF_SUCCESS;
5656}
5657
5658
5659/** Opcode 0x0f 0x95. */
5660FNIEMOP_DEF(iemOp_setne_Eb)
5661{
5662 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5663 IEMOP_HLP_MIN_386();
5664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5665
5666 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5667 * any way. AMD says it's "unused", whatever that means. We're
5668     *        ignoring it for now. */
5669 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5670 {
5671 /* register target */
5672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5673 IEM_MC_BEGIN(0, 0);
5674 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5675 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5676 } IEM_MC_ELSE() {
5677 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5678 } IEM_MC_ENDIF();
5679 IEM_MC_ADVANCE_RIP();
5680 IEM_MC_END();
5681 }
5682 else
5683 {
5684 /* memory target */
5685 IEM_MC_BEGIN(0, 1);
5686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5690 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5691 } IEM_MC_ELSE() {
5692 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5693 } IEM_MC_ENDIF();
5694 IEM_MC_ADVANCE_RIP();
5695 IEM_MC_END();
5696 }
5697 return VINF_SUCCESS;
5698}
5699
5700
5701/** Opcode 0x0f 0x96. */
5702FNIEMOP_DEF(iemOp_setbe_Eb)
5703{
5704 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5705 IEMOP_HLP_MIN_386();
5706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5707
5708 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5709 * any way. AMD says it's "unused", whatever that means. We're
5710     *        ignoring it for now. */
5711 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5712 {
5713 /* register target */
5714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5715 IEM_MC_BEGIN(0, 0);
5716 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5717 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5718 } IEM_MC_ELSE() {
5719 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5720 } IEM_MC_ENDIF();
5721 IEM_MC_ADVANCE_RIP();
5722 IEM_MC_END();
5723 }
5724 else
5725 {
5726 /* memory target */
5727 IEM_MC_BEGIN(0, 1);
5728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5731 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5732 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5733 } IEM_MC_ELSE() {
5734 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5735 } IEM_MC_ENDIF();
5736 IEM_MC_ADVANCE_RIP();
5737 IEM_MC_END();
5738 }
5739 return VINF_SUCCESS;
5740}
5741
5742
5743/** Opcode 0x0f 0x97. */
5744FNIEMOP_DEF(iemOp_setnbe_Eb)
5745{
5746 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5747 IEMOP_HLP_MIN_386();
5748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5749
5750 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5751 * any way. AMD says it's "unused", whatever that means. We're
5752     *        ignoring it for now. */
5753 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5754 {
5755 /* register target */
5756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5757 IEM_MC_BEGIN(0, 0);
5758 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5759 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5760 } IEM_MC_ELSE() {
5761 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5762 } IEM_MC_ENDIF();
5763 IEM_MC_ADVANCE_RIP();
5764 IEM_MC_END();
5765 }
5766 else
5767 {
5768 /* memory target */
5769 IEM_MC_BEGIN(0, 1);
5770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5773 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5774 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5775 } IEM_MC_ELSE() {
5776 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5777 } IEM_MC_ENDIF();
5778 IEM_MC_ADVANCE_RIP();
5779 IEM_MC_END();
5780 }
5781 return VINF_SUCCESS;
5782}
5783
5784
5785/** Opcode 0x0f 0x98. */
5786FNIEMOP_DEF(iemOp_sets_Eb)
5787{
5788 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5789 IEMOP_HLP_MIN_386();
5790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5791
5792 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5793 * any way. AMD says it's "unused", whatever that means. We're
5794     *        ignoring it for now. */
5795 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5796 {
5797 /* register target */
5798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5799 IEM_MC_BEGIN(0, 0);
5800 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5801 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5802 } IEM_MC_ELSE() {
5803 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5804 } IEM_MC_ENDIF();
5805 IEM_MC_ADVANCE_RIP();
5806 IEM_MC_END();
5807 }
5808 else
5809 {
5810 /* memory target */
5811 IEM_MC_BEGIN(0, 1);
5812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5815 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5816 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5817 } IEM_MC_ELSE() {
5818 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5819 } IEM_MC_ENDIF();
5820 IEM_MC_ADVANCE_RIP();
5821 IEM_MC_END();
5822 }
5823 return VINF_SUCCESS;
5824}
5825
5826
5827/** Opcode 0x0f 0x99. */
5828FNIEMOP_DEF(iemOp_setns_Eb)
5829{
5830 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5831 IEMOP_HLP_MIN_386();
5832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5833
5834 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5835 * any way. AMD says it's "unused", whatever that means. We're
5836     *        ignoring it for now. */
5837 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5838 {
5839 /* register target */
5840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5841 IEM_MC_BEGIN(0, 0);
5842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5843 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5844 } IEM_MC_ELSE() {
5845 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5846 } IEM_MC_ENDIF();
5847 IEM_MC_ADVANCE_RIP();
5848 IEM_MC_END();
5849 }
5850 else
5851 {
5852 /* memory target */
5853 IEM_MC_BEGIN(0, 1);
5854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5857 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5858 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5859 } IEM_MC_ELSE() {
5860 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5861 } IEM_MC_ENDIF();
5862 IEM_MC_ADVANCE_RIP();
5863 IEM_MC_END();
5864 }
5865 return VINF_SUCCESS;
5866}
5867
5868
5869/** Opcode 0x0f 0x9a. */
5870FNIEMOP_DEF(iemOp_setp_Eb)
5871{
5872 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5873 IEMOP_HLP_MIN_386();
5874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5875
5876 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5877 * any way. AMD says it's "unused", whatever that means. We're
5878     *        ignoring it for now. */
5879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5880 {
5881 /* register target */
5882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5883 IEM_MC_BEGIN(0, 0);
5884 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5885 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5886 } IEM_MC_ELSE() {
5887 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5888 } IEM_MC_ENDIF();
5889 IEM_MC_ADVANCE_RIP();
5890 IEM_MC_END();
5891 }
5892 else
5893 {
5894 /* memory target */
5895 IEM_MC_BEGIN(0, 1);
5896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5900 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5901 } IEM_MC_ELSE() {
5902 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5903 } IEM_MC_ENDIF();
5904 IEM_MC_ADVANCE_RIP();
5905 IEM_MC_END();
5906 }
5907 return VINF_SUCCESS;
5908}
5909
5910
5911/** Opcode 0x0f 0x9b. */
5912FNIEMOP_DEF(iemOp_setnp_Eb)
5913{
5914 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5915 IEMOP_HLP_MIN_386();
5916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5917
5918 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5919 * any way. AMD says it's "unused", whatever that means. We're
5920     *        ignoring it for now. */
5921 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5922 {
5923 /* register target */
5924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5925 IEM_MC_BEGIN(0, 0);
5926 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5927 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5928 } IEM_MC_ELSE() {
5929 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5930 } IEM_MC_ENDIF();
5931 IEM_MC_ADVANCE_RIP();
5932 IEM_MC_END();
5933 }
5934 else
5935 {
5936 /* memory target */
5937 IEM_MC_BEGIN(0, 1);
5938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5941 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5942 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5943 } IEM_MC_ELSE() {
5944 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5945 } IEM_MC_ENDIF();
5946 IEM_MC_ADVANCE_RIP();
5947 IEM_MC_END();
5948 }
5949 return VINF_SUCCESS;
5950}
5951
5952
5953/** Opcode 0x0f 0x9c. */
5954FNIEMOP_DEF(iemOp_setl_Eb)
5955{
5956 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5957 IEMOP_HLP_MIN_386();
5958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5959
5960 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5961 * any way. AMD says it's "unused", whatever that means. We're
5962     *        ignoring it for now. */
5963 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5964 {
5965 /* register target */
5966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5967 IEM_MC_BEGIN(0, 0);
5968 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5969 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5970 } IEM_MC_ELSE() {
5971 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5972 } IEM_MC_ENDIF();
5973 IEM_MC_ADVANCE_RIP();
5974 IEM_MC_END();
5975 }
5976 else
5977 {
5978 /* memory target */
5979 IEM_MC_BEGIN(0, 1);
5980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5983 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5984 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5985 } IEM_MC_ELSE() {
5986 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5987 } IEM_MC_ENDIF();
5988 IEM_MC_ADVANCE_RIP();
5989 IEM_MC_END();
5990 }
5991 return VINF_SUCCESS;
5992}
5993
5994
5995/** Opcode 0x0f 0x9d. */
5996FNIEMOP_DEF(iemOp_setnl_Eb)
5997{
5998 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5999 IEMOP_HLP_MIN_386();
6000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6001
6002 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6003 * any way. AMD says it's "unused", whatever that means. We're
6004     *        ignoring it for now. */
6005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6006 {
6007 /* register target */
6008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6009 IEM_MC_BEGIN(0, 0);
6010 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6011 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6012 } IEM_MC_ELSE() {
6013 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6014 } IEM_MC_ENDIF();
6015 IEM_MC_ADVANCE_RIP();
6016 IEM_MC_END();
6017 }
6018 else
6019 {
6020 /* memory target */
6021 IEM_MC_BEGIN(0, 1);
6022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6025 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6026 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6027 } IEM_MC_ELSE() {
6028 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6029 } IEM_MC_ENDIF();
6030 IEM_MC_ADVANCE_RIP();
6031 IEM_MC_END();
6032 }
6033 return VINF_SUCCESS;
6034}
6035
6036
6037/** Opcode 0x0f 0x9e. */
6038FNIEMOP_DEF(iemOp_setle_Eb)
6039{
6040 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
6041 IEMOP_HLP_MIN_386();
6042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6043
6044 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6045 * any way. AMD says it's "unused", whatever that means. We're
6046     *        ignoring it for now. */
6047 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6048 {
6049 /* register target */
6050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6051 IEM_MC_BEGIN(0, 0);
6052 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6053 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6054 } IEM_MC_ELSE() {
6055 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6056 } IEM_MC_ENDIF();
6057 IEM_MC_ADVANCE_RIP();
6058 IEM_MC_END();
6059 }
6060 else
6061 {
6062 /* memory target */
6063 IEM_MC_BEGIN(0, 1);
6064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6067 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6068 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6069 } IEM_MC_ELSE() {
6070 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6071 } IEM_MC_ENDIF();
6072 IEM_MC_ADVANCE_RIP();
6073 IEM_MC_END();
6074 }
6075 return VINF_SUCCESS;
6076}
6077
6078
6079/** Opcode 0x0f 0x9f. */
6080FNIEMOP_DEF(iemOp_setnle_Eb)
6081{
6082 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
6083 IEMOP_HLP_MIN_386();
6084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6085
6086 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6087 * any way. AMD says it's "unused", whatever that means. We're
6088     *        ignoring it for now. */
6089 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6090 {
6091 /* register target */
6092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6093 IEM_MC_BEGIN(0, 0);
6094 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6095 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
6096 } IEM_MC_ELSE() {
6097 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
6098 } IEM_MC_ENDIF();
6099 IEM_MC_ADVANCE_RIP();
6100 IEM_MC_END();
6101 }
6102 else
6103 {
6104 /* memory target */
6105 IEM_MC_BEGIN(0, 1);
6106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6109 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6110 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6111 } IEM_MC_ELSE() {
6112 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6113 } IEM_MC_ENDIF();
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 }
6117 return VINF_SUCCESS;
6118}
6119
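/*
 * For reference, the signed-comparison conditions used by the
 * setl/setnl/setle/setnle handlers above, written as standalone
 * predicates (a sketch, not part of the build; bit positions are the
 * architectural ones):
 *
 *    #include <stdint.h>
 *    #include <stdbool.h>
 *
 *    #define MY_EFL_OF (1u << 11)
 *    #define MY_EFL_SF (1u << 7)
 *    #define MY_EFL_ZF (1u << 6)
 *
 *    static bool SfNeOf(uint32_t f)     { return ((f & MY_EFL_SF) != 0) != ((f & MY_EFL_OF) != 0); }
 *
 *    static bool SetlCond(uint32_t f)   { return SfNeOf(f); }                          // l/nge
 *    static bool SetnlCond(uint32_t f)  { return !SfNeOf(f); }                         // nl/ge
 *    static bool SetleCond(uint32_t f)  { return (f & MY_EFL_ZF) != 0 || SfNeOf(f); }  // le/ng
 *    static bool SetnleCond(uint32_t f) { return !SetleCond(f); }                      // nle/g
 */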
6120
6121/**
6122 * Common 'push segment-register' helper.
6123 */
6124FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
6125{
6126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6127 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
6128 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6129
6130 switch (pVCpu->iem.s.enmEffOpSize)
6131 {
6132 case IEMMODE_16BIT:
6133 IEM_MC_BEGIN(0, 1);
6134 IEM_MC_LOCAL(uint16_t, u16Value);
6135 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
6136 IEM_MC_PUSH_U16(u16Value);
6137 IEM_MC_ADVANCE_RIP();
6138 IEM_MC_END();
6139 break;
6140
6141 case IEMMODE_32BIT:
6142 IEM_MC_BEGIN(0, 1);
6143 IEM_MC_LOCAL(uint32_t, u32Value);
6144 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
6145 IEM_MC_PUSH_U32_SREG(u32Value);
6146 IEM_MC_ADVANCE_RIP();
6147 IEM_MC_END();
6148 break;
6149
6150 case IEMMODE_64BIT:
6151 IEM_MC_BEGIN(0, 1);
6152 IEM_MC_LOCAL(uint64_t, u64Value);
6153 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
6154 IEM_MC_PUSH_U64(u64Value);
6155 IEM_MC_ADVANCE_RIP();
6156 IEM_MC_END();
6157 break;
6158 }
6159
6160 return VINF_SUCCESS;
6161}
6162
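/*
 * Why the 32-bit case above uses IEM_MC_PUSH_U32_SREG rather than a plain
 * 32-bit push: with a 32-bit operand size, at least some real CPUs only
 * write the low 16 bits of the selector, leaving the top half of the
 * stack slot unmodified, even though the stack pointer still moves by
 * four.  A standalone sketch of that behaviour (names are illustrative):
 *
 *    #include <stdint.h>
 *    #include <string.h>
 *
 *    static void PushSRegU32(uint8_t *pbStack, uint32_t *puEsp, uint16_t uSel)
 *    {
 *        *puEsp -= 4;                                    // full 32-bit stack slot...
 *        memcpy(pbStack + *puEsp, &uSel, sizeof(uSel));  // ...but only a 16-bit store
 *    }
 */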
6163
6164/** Opcode 0x0f 0xa0. */
6165FNIEMOP_DEF(iemOp_push_fs)
6166{
6167 IEMOP_MNEMONIC(push_fs, "push fs");
6168 IEMOP_HLP_MIN_386();
6169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6170 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
6171}
6172
6173
6174/** Opcode 0x0f 0xa1. */
6175FNIEMOP_DEF(iemOp_pop_fs)
6176{
6177 IEMOP_MNEMONIC(pop_fs, "pop fs");
6178 IEMOP_HLP_MIN_386();
6179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6180 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
6181}
6182
6183
6184/** Opcode 0x0f 0xa2. */
6185FNIEMOP_DEF(iemOp_cpuid)
6186{
6187 IEMOP_MNEMONIC(cpuid, "cpuid");
6188    IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486s. */
6189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6190 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
6191}
6192
6193
6194/**
6195 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
6196 * iemOp_bts_Ev_Gv.
6197 */
6198FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6199{
6200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6201 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6202
6203 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6204 {
6205 /* register destination. */
6206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6207 switch (pVCpu->iem.s.enmEffOpSize)
6208 {
6209 case IEMMODE_16BIT:
6210 IEM_MC_BEGIN(3, 0);
6211 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6212 IEM_MC_ARG(uint16_t, u16Src, 1);
6213 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6214
6215 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6216 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6217 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6218 IEM_MC_REF_EFLAGS(pEFlags);
6219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6220
6221 IEM_MC_ADVANCE_RIP();
6222 IEM_MC_END();
6223 return VINF_SUCCESS;
6224
6225 case IEMMODE_32BIT:
6226 IEM_MC_BEGIN(3, 0);
6227 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6228 IEM_MC_ARG(uint32_t, u32Src, 1);
6229 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6230
6231 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6232 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6233 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6234 IEM_MC_REF_EFLAGS(pEFlags);
6235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6236
6237 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6238 IEM_MC_ADVANCE_RIP();
6239 IEM_MC_END();
6240 return VINF_SUCCESS;
6241
6242 case IEMMODE_64BIT:
6243 IEM_MC_BEGIN(3, 0);
6244 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6245 IEM_MC_ARG(uint64_t, u64Src, 1);
6246 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6247
6248 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6249 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6250 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6251 IEM_MC_REF_EFLAGS(pEFlags);
6252 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6253
6254 IEM_MC_ADVANCE_RIP();
6255 IEM_MC_END();
6256 return VINF_SUCCESS;
6257
6258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6259 }
6260 }
6261 else
6262 {
6263 /* memory destination. */
6264
6265 uint32_t fAccess;
6266 if (pImpl->pfnLockedU16)
6267 fAccess = IEM_ACCESS_DATA_RW;
6268 else /* BT */
6269 fAccess = IEM_ACCESS_DATA_R;
6270
6271 /** @todo test negative bit offsets! */
6272 switch (pVCpu->iem.s.enmEffOpSize)
6273 {
6274 case IEMMODE_16BIT:
6275 IEM_MC_BEGIN(3, 2);
6276 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6277 IEM_MC_ARG(uint16_t, u16Src, 1);
6278 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6280 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6281
6282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6283 if (pImpl->pfnLockedU16)
6284 IEMOP_HLP_DONE_DECODING();
6285 else
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6288 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6289 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6290 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6291 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6292 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6293 IEM_MC_FETCH_EFLAGS(EFlags);
6294
6295 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6296 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6297 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6298 else
6299 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6300 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6301
6302 IEM_MC_COMMIT_EFLAGS(EFlags);
6303 IEM_MC_ADVANCE_RIP();
6304 IEM_MC_END();
6305 return VINF_SUCCESS;
6306
6307 case IEMMODE_32BIT:
6308 IEM_MC_BEGIN(3, 2);
6309 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6310 IEM_MC_ARG(uint32_t, u32Src, 1);
6311 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6313 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6314
6315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6316 if (pImpl->pfnLockedU16)
6317 IEMOP_HLP_DONE_DECODING();
6318 else
6319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6320 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6321 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6322 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6323 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6324 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6325 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6326 IEM_MC_FETCH_EFLAGS(EFlags);
6327
6328 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6329 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6330 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6331 else
6332 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6333 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6334
6335 IEM_MC_COMMIT_EFLAGS(EFlags);
6336 IEM_MC_ADVANCE_RIP();
6337 IEM_MC_END();
6338 return VINF_SUCCESS;
6339
6340 case IEMMODE_64BIT:
6341 IEM_MC_BEGIN(3, 2);
6342 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6343 IEM_MC_ARG(uint64_t, u64Src, 1);
6344 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6346 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6347
6348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6349 if (pImpl->pfnLockedU16)
6350 IEMOP_HLP_DONE_DECODING();
6351 else
6352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6353 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6354 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6355 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6356 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6357 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6358 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6359 IEM_MC_FETCH_EFLAGS(EFlags);
6360
6361 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6362 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6363 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6364 else
6365 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6366 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6367
6368 IEM_MC_COMMIT_EFLAGS(EFlags);
6369 IEM_MC_ADVANCE_RIP();
6370 IEM_MC_END();
6371 return VINF_SUCCESS;
6372
6373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6374 }
6375 }
6376}
6377
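/*
 * How the memory path above turns the source register into an address:
 * the bit offset is split into a signed element index (the SAR by 4/5/6)
 * that is then scaled to bytes (the SHL by 1/2/3) and added to the
 * effective address, while the AND keeps only the in-element bit number.
 * The arithmetic shift is what lets negative offsets address memory below
 * the operand.  A standalone sketch for the 16-bit case:
 *
 *    #include <stdint.h>
 *
 *    static void SplitBitOffsetU16(uint16_t uBitOffset, int32_t *poffByte, uint16_t *piBit)
 *    {
 *        int16_t iAdj = (int16_t)uBitOffset;
 *        iAdj >>= 4;                     // word index; arithmetic shift keeps the sign
 *        *poffByte = (int32_t)iAdj * 2;  // scale words to bytes (the SHL by 1)
 *        *piBit    = uBitOffset & 0xf;   // bit number within the word
 *    }
 */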
6378
6379/** Opcode 0x0f 0xa3. */
6380FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6381{
6382 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6383 IEMOP_HLP_MIN_386();
6384 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6385}
6386
6387
6388/**
6389 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6390 */
6391FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6392{
6393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6394 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6395
6396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6397 {
6398 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400
6401 switch (pVCpu->iem.s.enmEffOpSize)
6402 {
6403 case IEMMODE_16BIT:
6404 IEM_MC_BEGIN(4, 0);
6405 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6406 IEM_MC_ARG(uint16_t, u16Src, 1);
6407 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6408 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6409
6410 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6411 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6412 IEM_MC_REF_EFLAGS(pEFlags);
6413 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6414
6415 IEM_MC_ADVANCE_RIP();
6416 IEM_MC_END();
6417 return VINF_SUCCESS;
6418
6419 case IEMMODE_32BIT:
6420 IEM_MC_BEGIN(4, 0);
6421 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6422 IEM_MC_ARG(uint32_t, u32Src, 1);
6423 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6424 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6425
6426 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6427 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6428 IEM_MC_REF_EFLAGS(pEFlags);
6429 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6430
6431 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6432 IEM_MC_ADVANCE_RIP();
6433 IEM_MC_END();
6434 return VINF_SUCCESS;
6435
6436 case IEMMODE_64BIT:
6437 IEM_MC_BEGIN(4, 0);
6438 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6439 IEM_MC_ARG(uint64_t, u64Src, 1);
6440 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6441 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6442
6443 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6444 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6445 IEM_MC_REF_EFLAGS(pEFlags);
6446 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6447
6448 IEM_MC_ADVANCE_RIP();
6449 IEM_MC_END();
6450 return VINF_SUCCESS;
6451
6452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6453 }
6454 }
6455 else
6456 {
6457 switch (pVCpu->iem.s.enmEffOpSize)
6458 {
6459 case IEMMODE_16BIT:
6460 IEM_MC_BEGIN(4, 2);
6461 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6462 IEM_MC_ARG(uint16_t, u16Src, 1);
6463 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6464 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6466
6467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6468 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6469 IEM_MC_ASSIGN(cShiftArg, cShift);
6470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6471 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6472 IEM_MC_FETCH_EFLAGS(EFlags);
6473 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6474 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6475
6476 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6477 IEM_MC_COMMIT_EFLAGS(EFlags);
6478 IEM_MC_ADVANCE_RIP();
6479 IEM_MC_END();
6480 return VINF_SUCCESS;
6481
6482 case IEMMODE_32BIT:
6483 IEM_MC_BEGIN(4, 2);
6484 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6485 IEM_MC_ARG(uint32_t, u32Src, 1);
6486 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6487 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6489
6490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6491 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6492 IEM_MC_ASSIGN(cShiftArg, cShift);
6493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6494 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6495 IEM_MC_FETCH_EFLAGS(EFlags);
6496 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6497 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6498
6499 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6500 IEM_MC_COMMIT_EFLAGS(EFlags);
6501 IEM_MC_ADVANCE_RIP();
6502 IEM_MC_END();
6503 return VINF_SUCCESS;
6504
6505 case IEMMODE_64BIT:
6506 IEM_MC_BEGIN(4, 2);
6507 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6508 IEM_MC_ARG(uint64_t, u64Src, 1);
6509 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6510 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6512
6513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6514 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6515 IEM_MC_ASSIGN(cShiftArg, cShift);
6516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6517 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6518 IEM_MC_FETCH_EFLAGS(EFlags);
6519 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6520 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6521
6522 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6523 IEM_MC_COMMIT_EFLAGS(EFlags);
6524 IEM_MC_ADVANCE_RIP();
6525 IEM_MC_END();
6526 return VINF_SUCCESS;
6527
6528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6529 }
6530 }
6531}
6532
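/*
 * What pfnNormalU32 computes for SHLD, sketched standalone (SHRD mirrors
 * it, shifting right and filling from the low end of the source).  Also
 * worth noting: the memory path above passes cbImm=1 to
 * IEM_MC_CALC_RM_EFF_ADDR because the imm8 byte still follows the ModR/M
 * bytes, which matters for RIP-relative addressing in 64-bit mode.
 *
 *    #include <stdint.h>
 *
 *    static uint32_t ShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *    {
 *        cShift &= 31;                // the count is masked to 0..31
 *        if (!cShift)
 *            return uDst;             // also avoids the undefined 32-bit shift below
 *        return (uDst << cShift) | (uSrc >> (32 - cShift));
 *    }
 */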
6533
6534/**
6535 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6536 */
6537FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6538{
6539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6540 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6541
6542 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6543 {
6544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6545
6546 switch (pVCpu->iem.s.enmEffOpSize)
6547 {
6548 case IEMMODE_16BIT:
6549 IEM_MC_BEGIN(4, 0);
6550 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6551 IEM_MC_ARG(uint16_t, u16Src, 1);
6552 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6553 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6554
6555 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6556 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6557 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6558 IEM_MC_REF_EFLAGS(pEFlags);
6559 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6560
6561 IEM_MC_ADVANCE_RIP();
6562 IEM_MC_END();
6563 return VINF_SUCCESS;
6564
6565 case IEMMODE_32BIT:
6566 IEM_MC_BEGIN(4, 0);
6567 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6568 IEM_MC_ARG(uint32_t, u32Src, 1);
6569 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6570 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6571
6572 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6573 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6574 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6575 IEM_MC_REF_EFLAGS(pEFlags);
6576 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6577
6578 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6579 IEM_MC_ADVANCE_RIP();
6580 IEM_MC_END();
6581 return VINF_SUCCESS;
6582
6583 case IEMMODE_64BIT:
6584 IEM_MC_BEGIN(4, 0);
6585 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6586 IEM_MC_ARG(uint64_t, u64Src, 1);
6587 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6588 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6589
6590 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6591 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6592 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6593 IEM_MC_REF_EFLAGS(pEFlags);
6594 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6595
6596 IEM_MC_ADVANCE_RIP();
6597 IEM_MC_END();
6598 return VINF_SUCCESS;
6599
6600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6601 }
6602 }
6603 else
6604 {
6605 switch (pVCpu->iem.s.enmEffOpSize)
6606 {
6607 case IEMMODE_16BIT:
6608 IEM_MC_BEGIN(4, 2);
6609 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6610 IEM_MC_ARG(uint16_t, u16Src, 1);
6611 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6612 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6614
6615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6617 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6618 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6619 IEM_MC_FETCH_EFLAGS(EFlags);
6620 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6621 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6622
6623 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6624 IEM_MC_COMMIT_EFLAGS(EFlags);
6625 IEM_MC_ADVANCE_RIP();
6626 IEM_MC_END();
6627 return VINF_SUCCESS;
6628
6629 case IEMMODE_32BIT:
6630 IEM_MC_BEGIN(4, 2);
6631 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6632 IEM_MC_ARG(uint32_t, u32Src, 1);
6633 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6634 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6636
6637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6639 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6640 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6641 IEM_MC_FETCH_EFLAGS(EFlags);
6642 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6643 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6644
6645 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6646 IEM_MC_COMMIT_EFLAGS(EFlags);
6647 IEM_MC_ADVANCE_RIP();
6648 IEM_MC_END();
6649 return VINF_SUCCESS;
6650
6651 case IEMMODE_64BIT:
6652 IEM_MC_BEGIN(4, 2);
6653 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6654 IEM_MC_ARG(uint64_t, u64Src, 1);
6655 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6656 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6658
6659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6661 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6662 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6663 IEM_MC_FETCH_EFLAGS(EFlags);
6664 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6665 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6666
6667 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6668 IEM_MC_COMMIT_EFLAGS(EFlags);
6669 IEM_MC_ADVANCE_RIP();
6670 IEM_MC_END();
6671 return VINF_SUCCESS;
6672
6673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6674 }
6675 }
6676}
6677
6678
6679
6680/** Opcode 0x0f 0xa4. */
6681FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6682{
6683 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6684 IEMOP_HLP_MIN_386();
6685 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6686}
6687
6688
6689/** Opcode 0x0f 0xa5. */
6690FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6691{
6692 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6693 IEMOP_HLP_MIN_386();
6694 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6695}
6696
6697
6698/** Opcode 0x0f 0xa8. */
6699FNIEMOP_DEF(iemOp_push_gs)
6700{
6701 IEMOP_MNEMONIC(push_gs, "push gs");
6702 IEMOP_HLP_MIN_386();
6703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6704 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6705}
6706
6707
6708/** Opcode 0x0f 0xa9. */
6709FNIEMOP_DEF(iemOp_pop_gs)
6710{
6711 IEMOP_MNEMONIC(pop_gs, "pop gs");
6712 IEMOP_HLP_MIN_386();
6713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6714 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6715}
6716
6717
6718/** Opcode 0x0f 0xaa. */
6719FNIEMOP_DEF(iemOp_rsm)
6720{
6721 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
6722 IEMOP_HLP_MIN_386(); /* 386SL and later. */
6723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6724 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
6725}
6726
6727
6728
6729/** Opcode 0x0f 0xab. */
6730FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6731{
6732 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6733 IEMOP_HLP_MIN_386();
6734 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6735}
6736
6737
6738/** Opcode 0x0f 0xac. */
6739FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6740{
6741 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6742 IEMOP_HLP_MIN_386();
6743 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6744}
6745
6746
6747/** Opcode 0x0f 0xad. */
6748FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6749{
6750 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6751 IEMOP_HLP_MIN_386();
6752 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6753}
6754
6755
6756/** Opcode 0x0f 0xae mem/0. */
6757FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6758{
6759 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6760 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6761 return IEMOP_RAISE_INVALID_OPCODE();
6762
6763 IEM_MC_BEGIN(3, 1);
6764 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6765 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6766 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6769 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6770 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6771 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6772 IEM_MC_END();
6773 return VINF_SUCCESS;
6774}
6775
6776
6777/** Opcode 0x0f 0xae mem/1. */
6778FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6779{
6780 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6781 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6782 return IEMOP_RAISE_INVALID_OPCODE();
6783
6784 IEM_MC_BEGIN(3, 1);
6785 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6786 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6787 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6790 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6791 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6792 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6793 IEM_MC_END();
6794 return VINF_SUCCESS;
6795}
6796
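/*
 * Both fxsave and fxrstor above operate on a 512-byte memory image that
 * architecturally must be 16-byte aligned; a misaligned operand raises
 * #GP, a check performed by the cImpl workers.  The alignment test,
 * sketched standalone:
 *
 *    #include <stdint.h>
 *    #include <stdbool.h>
 *
 *    static bool IsFxSaveAreaAligned(uint64_t uGCPtrEff)
 *    {
 *        return (uGCPtrEff & 15) == 0;   // low four bits clear -> 16-byte aligned
 *    }
 */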
6797
6798/**
6799 * @opmaps grp15
6800 * @opcode !11/2
6801 * @oppfx none
6802 * @opcpuid sse
6803 * @opgroup og_sse_mxcsrsm
6804 * @opxcpttype 5
6805 * @optest op1=0 -> mxcsr=0
6806 * @optest op1=0x2083 -> mxcsr=0x2083
6807 * @optest op1=0xfffffffe -> value.xcpt=0xd
6808 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6809 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6810 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6811 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6812 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6813 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6814 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6815 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6816 */
6817FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6818{
6819 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6820 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6821 return IEMOP_RAISE_INVALID_OPCODE();
6822
6823 IEM_MC_BEGIN(2, 0);
6824 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6825 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6828 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6829 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6830 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6831 IEM_MC_END();
6832 return VINF_SUCCESS;
6833}
6834
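/*
 * The '@optest op1=0xfffffffe -> value.xcpt=0xd' line above documents the
 * rule the cImpl worker enforces: loading MXCSR with any bit outside the
 * CPU's MXCSR_MASK (reported in the FXSAVE image; 0xffbf is assumed when
 * that field reads zero) raises #GP(0).  A sketch of the check:
 *
 *    #include <stdint.h>
 *    #include <stdbool.h>
 *
 *    static bool IsValidMxCsr(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
 *    {
 *        return (uNewMxCsr & ~fMxCsrMask) == 0;   // false -> raise #GP(0)
 *    }
 */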
6835
6836/**
6837 * @opmaps grp15
6838 * @opcode !11/3
6839 * @oppfx none
6840 * @opcpuid sse
6841 * @opgroup og_sse_mxcsrsm
6842 * @opxcpttype 5
6843 * @optest mxcsr=0 -> op1=0
6844 * @optest mxcsr=0x2083 -> op1=0x2083
6845 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6846 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6847 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6848 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6849 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6850 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6851 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6852 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6853 */
6854FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6855{
6856 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6857 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6858 return IEMOP_RAISE_INVALID_OPCODE();
6859
6860 IEM_MC_BEGIN(2, 0);
6861 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6862 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6866 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6867 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6868 IEM_MC_END();
6869 return VINF_SUCCESS;
6870}
6871
6872
6873/**
6874 * @opmaps grp15
6875 * @opcode !11/4
6876 * @oppfx none
6877 * @opcpuid xsave
6878 * @opgroup og_system
6879 * @opxcpttype none
6880 */
6881FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6882{
6883 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6884 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6885 return IEMOP_RAISE_INVALID_OPCODE();
6886
6887 IEM_MC_BEGIN(3, 0);
6888 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6889 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6890 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6893 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6894 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6895 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6896 IEM_MC_END();
6897 return VINF_SUCCESS;
6898}
6899
6900
6901/**
6902 * @opmaps grp15
6903 * @opcode !11/5
6904 * @oppfx none
6905 * @opcpuid xsave
6906 * @opgroup og_system
6907 * @opxcpttype none
6908 */
6909FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6910{
6911 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6912 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6913 return IEMOP_RAISE_INVALID_OPCODE();
6914
6915 IEM_MC_BEGIN(3, 0);
6916 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6917 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6918 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6921    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the FPU/SSE state, like fxrstor above. */
6922 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6923 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6924 IEM_MC_END();
6925 return VINF_SUCCESS;
6926}
6927
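/*
 * Both XSAVE and XRSTOR take an implicit EDX:EAX operand, the
 * requested-feature bitmap, which is ANDed with XCR0 to pick the state
 * components the instruction actually touches; the cImpl workers deferred
 * to above do that masking.  Sketched standalone:
 *
 *    #include <stdint.h>
 *
 *    static uint64_t XsaveComponents(uint32_t uEax, uint32_t uEdx, uint64_t uXcr0)
 *    {
 *        uint64_t const fRfbm = ((uint64_t)uEdx << 32) | uEax;  // implicit EDX:EAX
 *        return fRfbm & uXcr0;                                  // RFBM & XCR0
 *    }
 */
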
6928/** Opcode 0x0f 0xae mem/6. */
6929FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6930
6931/**
6932 * @opmaps grp15
6933 * @opcode !11/7
6934 * @oppfx none
6935 * @opcpuid clfsh
6936 * @opgroup og_cachectl
6937 * @optest op1=1 ->
6938 */
6939FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6940{
6941 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6942 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6943 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6944
6945 IEM_MC_BEGIN(2, 0);
6946 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6947 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6950 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6951 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6952 IEM_MC_END();
6953 return VINF_SUCCESS;
6954}
6955
6956/**
6957 * @opmaps grp15
6958 * @opcode !11/7
6959 * @oppfx 0x66
6960 * @opcpuid clflushopt
6961 * @opgroup og_cachectl
6962 * @optest op1=1 ->
6963 */
6964FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6965{
6966 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6967 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6968 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6969
6970 IEM_MC_BEGIN(2, 0);
6971 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6972 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6975 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6976 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6977 IEM_MC_END();
6978 return VINF_SUCCESS;
6979}
6980
6981
6982/** Opcode 0x0f 0xae 11b/5. */
6983FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6984{
6985 RT_NOREF_PV(bRm);
6986 IEMOP_MNEMONIC(lfence, "lfence");
6987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6988 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6989 return IEMOP_RAISE_INVALID_OPCODE();
6990
6991 IEM_MC_BEGIN(0, 0);
6992 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6993 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6994 else
6995 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6996 IEM_MC_ADVANCE_RIP();
6997 IEM_MC_END();
6998 return VINF_SUCCESS;
6999}
7000
7001
7002/** Opcode 0x0f 0xae 11b/6. */
7003FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7004{
7005 RT_NOREF_PV(bRm);
7006 IEMOP_MNEMONIC(mfence, "mfence");
7007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7008 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7009 return IEMOP_RAISE_INVALID_OPCODE();
7010
7011 IEM_MC_BEGIN(0, 0);
7012 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7013 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7014 else
7015 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7016 IEM_MC_ADVANCE_RIP();
7017 IEM_MC_END();
7018 return VINF_SUCCESS;
7019}
7020
7021
7022/** Opcode 0x0f 0xae 11b/7. */
7023FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7024{
7025 RT_NOREF_PV(bRm);
7026 IEMOP_MNEMONIC(sfence, "sfence");
7027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7028 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7029 return IEMOP_RAISE_INVALID_OPCODE();
7030
7031 IEM_MC_BEGIN(0, 0);
7032 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7033 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
7034 else
7035 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7036 IEM_MC_ADVANCE_RIP();
7037 IEM_MC_END();
7038 return VINF_SUCCESS;
7039}
7040
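/*
 * All three fence handlers above only use the real LFENCE/MFENCE/SFENCE
 * assembly workers when the host itself has SSE2, falling back to
 * iemAImpl_alt_mem_fence otherwise.  A locked read-modify-write is the
 * classic full barrier on pre-SSE2 x86; an illustrative stand-in (not
 * VirtualBox's actual worker):
 *
 *    #include <stdatomic.h>
 *
 *    static void AltMemFence(void)
 *    {
 *        static _Atomic int s_iDummy;
 *        atomic_exchange(&s_iDummy, 0);   // a locked xchg serializes memory accesses
 *    }
 */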
7041
7042/** Opcode 0xf3 0x0f 0xae 11b/0. */
7043FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
7044{
7045 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
7046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7047 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7048 {
7049 IEM_MC_BEGIN(1, 0);
7050 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7051 IEM_MC_ARG(uint64_t, u64Dst, 0);
7052 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
7053 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7054 IEM_MC_ADVANCE_RIP();
7055 IEM_MC_END();
7056 }
7057 else
7058 {
7059 IEM_MC_BEGIN(1, 0);
7060 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7061 IEM_MC_ARG(uint32_t, u32Dst, 0);
7062 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
7063 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7064 IEM_MC_ADVANCE_RIP();
7065 IEM_MC_END();
7066 }
7067 return VINF_SUCCESS;
7068}
7069
7070
7071/** Opcode 0xf3 0x0f 0xae 11b/1. */
7072FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
7073{
7074 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
7075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7076 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7077 {
7078 IEM_MC_BEGIN(1, 0);
7079 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7080 IEM_MC_ARG(uint64_t, u64Dst, 0);
7081 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
7082 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Dst);
7083 IEM_MC_ADVANCE_RIP();
7084 IEM_MC_END();
7085 }
7086 else
7087 {
7088 IEM_MC_BEGIN(1, 0);
7089 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7090 IEM_MC_ARG(uint32_t, u32Dst, 0);
7091 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
7092 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Dst);
7093 IEM_MC_ADVANCE_RIP();
7094 IEM_MC_END();
7095 }
7096 return VINF_SUCCESS;
7097}
7098
7099
7100/** Opcode 0xf3 0x0f 0xae 11b/2. */
7101FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
7102{
7103 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
7104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7105 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7106 {
7107 IEM_MC_BEGIN(1, 0);
7108 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7109 IEM_MC_ARG(uint64_t, u64Dst, 0);
7110 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7111 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7112 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
7113 IEM_MC_ADVANCE_RIP();
7114 IEM_MC_END();
7115 }
7116 else
7117 {
7118 IEM_MC_BEGIN(1, 0);
7119 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7120 IEM_MC_ARG(uint32_t, u32Dst, 0);
7121 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7122 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
7123 IEM_MC_ADVANCE_RIP();
7124 IEM_MC_END();
7125 }
7126 return VINF_SUCCESS;
7127}
7128
7129
7130/** Opcode 0xf3 0x0f 0xae 11b/3. */
7131FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
7132{
7133 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
7134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7135 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
7136 {
7137 IEM_MC_BEGIN(1, 0);
7138 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7139 IEM_MC_ARG(uint64_t, u64Dst, 0);
7140 IEM_MC_FETCH_GREG_U64(u64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7141 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
7142 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
7143 IEM_MC_ADVANCE_RIP();
7144 IEM_MC_END();
7145 }
7146 else
7147 {
7148 IEM_MC_BEGIN(1, 0);
7149 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
7150 IEM_MC_ARG(uint32_t, u32Dst, 0);
7151 IEM_MC_FETCH_GREG_U32(u32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7152 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
7153 IEM_MC_ADVANCE_RIP();
7154 IEM_MC_END();
7155 }
7156 return VINF_SUCCESS;
7157}
7158
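/*
 * IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 above guards the 64-bit
 * wrfsbase/wrgsbase paths: the new base must be canonical, i.e. bits
 * 63:48 must equal bit 47 for 48-bit linear addresses, or the instruction
 * raises #GP(0).  The predicate, sketched standalone:
 *
 *    #include <stdint.h>
 *    #include <stdbool.h>
 *
 *    static bool IsCanonicalU64(uint64_t uAddr)
 *    {
 *        return (int64_t)uAddr == ((int64_t)uAddr << 16 >> 16);  // sign-extend from bit 47
 *    }
 */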
7159
7160/**
7161 * Group 15 jump table for register variant.
7162 */
7163IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
7164{ /* pfx: none, 066h, 0f3h, 0f2h */
7165 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
7166 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
7167 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
7168 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
7169 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7170 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7171 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7172 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7173};
7174AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
7175
7176
7177/**
7178 * Group 15 jump table for memory variant.
7179 */
7180IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
7181{ /* pfx: none, 066h, 0f3h, 0f2h */
7182 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7183 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7184 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7185 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7186 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7187 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7188 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7189 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7190};
7191AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
7192
7193
7194/** Opcode 0x0f 0xae. */
7195FNIEMOP_DEF(iemOp_Grp15)
7196{
7197 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
7198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7199 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7200 /* register, register */
7201 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7202 + pVCpu->iem.s.idxPrefix], bRm);
7203 /* memory, register */
7204 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7205 + pVCpu->iem.s.idxPrefix], bRm);
7206}
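
/* Both group 15 tables are indexed as (/r * 4) + prefix column, with the
   columns ordered none, 066h, 0f3h, 0f2h as the table comments show. A
   guarded-out sketch of the lookup for F3 0F AE /0 (rdfsbase); the helper
   name is illustrative and the idxPrefix encoding is an assumption read off
   the table headers: */
#if 0
# include <stdint.h>

static unsigned iemExampleGrp15TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    return ((bRm >> 3 /*X86_MODRM_REG_SHIFT*/) & 7) * 4 + idxPrefix;
}
/* bRm=0xc0 (mod=3, /0) with idxPrefix=2 (0f3h) yields index 2, which is
   iemOp_Grp15_rdfsbase in g_apfnGroup15RegReg. */
#endif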
7207
7208
7209/** Opcode 0x0f 0xaf. */
7210FNIEMOP_DEF(iemOp_imul_Gv_Ev)
7211{
7212 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
7213 IEMOP_HLP_MIN_386();
7214 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7215 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
7216}
7217
7218
7219/** Opcode 0x0f 0xb0. */
7220FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
7221{
7222 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
7223 IEMOP_HLP_MIN_486();
7224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7225
7226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7227 {
7228 IEMOP_HLP_DONE_DECODING();
7229 IEM_MC_BEGIN(4, 0);
7230 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7231 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7232 IEM_MC_ARG(uint8_t, u8Src, 2);
7233 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7234
7235 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7236 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7237 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
7238 IEM_MC_REF_EFLAGS(pEFlags);
7239 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7240 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7241 else
7242 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7243
7244 IEM_MC_ADVANCE_RIP();
7245 IEM_MC_END();
7246 }
7247 else
7248 {
7249 IEM_MC_BEGIN(4, 3);
7250 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7251 IEM_MC_ARG(uint8_t *, pu8Al, 1);
7252 IEM_MC_ARG(uint8_t, u8Src, 2);
7253 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7255 IEM_MC_LOCAL(uint8_t, u8Al);
7256
7257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7258 IEMOP_HLP_DONE_DECODING();
7259 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7260 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7261 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
7262 IEM_MC_FETCH_EFLAGS(EFlags);
7263 IEM_MC_REF_LOCAL(pu8Al, u8Al);
7264 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7265 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
7266 else
7267 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
7268
7269 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7270 IEM_MC_COMMIT_EFLAGS(EFlags);
7271 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
7272 IEM_MC_ADVANCE_RIP();
7273 IEM_MC_END();
7274 }
7275 return VINF_SUCCESS;
7276}
7277
7278/** Opcode 0x0f 0xb1. */
7279FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
7280{
7281 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
7282 IEMOP_HLP_MIN_486();
7283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7284
7285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7286 {
7287 IEMOP_HLP_DONE_DECODING();
7288 switch (pVCpu->iem.s.enmEffOpSize)
7289 {
7290 case IEMMODE_16BIT:
7291 IEM_MC_BEGIN(4, 0);
7292 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7293 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7294 IEM_MC_ARG(uint16_t, u16Src, 2);
7295 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7296
7297 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7298 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7299 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
7300 IEM_MC_REF_EFLAGS(pEFlags);
7301 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7302 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7303 else
7304 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7305
7306 IEM_MC_ADVANCE_RIP();
7307 IEM_MC_END();
7308 return VINF_SUCCESS;
7309
7310 case IEMMODE_32BIT:
7311 IEM_MC_BEGIN(4, 0);
7312 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7313 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7314 IEM_MC_ARG(uint32_t, u32Src, 2);
7315 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7316
7317 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7318 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7319 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7320 IEM_MC_REF_EFLAGS(pEFlags);
7321 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7322 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7323 else
7324 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7325
7326 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7327 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7328 IEM_MC_ADVANCE_RIP();
7329 IEM_MC_END();
7330 return VINF_SUCCESS;
7331
7332 case IEMMODE_64BIT:
7333 IEM_MC_BEGIN(4, 0);
7334 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7335 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7336#ifdef RT_ARCH_X86
7337 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7338#else
7339 IEM_MC_ARG(uint64_t, u64Src, 2);
7340#endif
7341 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7342
7343 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7344 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7345 IEM_MC_REF_EFLAGS(pEFlags);
7346#ifdef RT_ARCH_X86
7347 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7348 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7349 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7350 else
7351 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7352#else
7353 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7354 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7356 else
7357 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7358#endif
7359
7360 IEM_MC_ADVANCE_RIP();
7361 IEM_MC_END();
7362 return VINF_SUCCESS;
7363
7364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7365 }
7366 }
7367 else
7368 {
7369 switch (pVCpu->iem.s.enmEffOpSize)
7370 {
7371 case IEMMODE_16BIT:
7372 IEM_MC_BEGIN(4, 3);
7373 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7374 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7375 IEM_MC_ARG(uint16_t, u16Src, 2);
7376 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7378 IEM_MC_LOCAL(uint16_t, u16Ax);
7379
7380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7381 IEMOP_HLP_DONE_DECODING();
7382 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7383 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7384 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7385 IEM_MC_FETCH_EFLAGS(EFlags);
7386 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7387 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7389 else
7390 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7391
7392 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7393 IEM_MC_COMMIT_EFLAGS(EFlags);
7394 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7395 IEM_MC_ADVANCE_RIP();
7396 IEM_MC_END();
7397 return VINF_SUCCESS;
7398
7399 case IEMMODE_32BIT:
7400 IEM_MC_BEGIN(4, 3);
7401 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7402 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7403 IEM_MC_ARG(uint32_t, u32Src, 2);
7404 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7406 IEM_MC_LOCAL(uint32_t, u32Eax);
7407
7408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7409 IEMOP_HLP_DONE_DECODING();
7410 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7411 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7412 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7413 IEM_MC_FETCH_EFLAGS(EFlags);
7414 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7415 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7416 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7417 else
7418 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7419
7420 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7421 IEM_MC_COMMIT_EFLAGS(EFlags);
7422 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7423 IEM_MC_ADVANCE_RIP();
7424 IEM_MC_END();
7425 return VINF_SUCCESS;
7426
7427 case IEMMODE_64BIT:
7428 IEM_MC_BEGIN(4, 3);
7429 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7430 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7431#ifdef RT_ARCH_X86
7432 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7433#else
7434 IEM_MC_ARG(uint64_t, u64Src, 2);
7435#endif
7436 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7438 IEM_MC_LOCAL(uint64_t, u64Rax);
7439
7440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7441 IEMOP_HLP_DONE_DECODING();
7442 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7443 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7444 IEM_MC_FETCH_EFLAGS(EFlags);
7445 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7446#ifdef RT_ARCH_X86
7447 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7448 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7449 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7450 else
7451 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7452#else
7453 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7454 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7455 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7456 else
7457 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7458#endif
7459
7460 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7461 IEM_MC_COMMIT_EFLAGS(EFlags);
7462 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7463 IEM_MC_ADVANCE_RIP();
7464 IEM_MC_END();
7465 return VINF_SUCCESS;
7466
7467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7468 }
7469 }
7470}
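
/* For reference, the architectural operation the cmpxchg workers invoked
   above implement, as a guarded-out standalone sketch (the helper name is
   illustrative and EFLAGS is reduced to ZF; the real aimpl workers also
   update the arithmetic flags from the implicit compare): */
#if 0
# include <stdint.h>
# include <stdbool.h>

static void iemExampleCmpXchgU16(uint16_t *puDst, uint16_t *puAx, uint16_t uSrc, bool *pfZF)
{
    if (*puDst == *puAx)
    {
        *puDst = uSrc;      /* match: the destination receives the source */
        *pfZF  = true;
    }
    else
    {
        *puAx  = *puDst;    /* mismatch: AX receives the old destination value,
                               which is why the memory form above stores the
                               AX copy back unconditionally */
        *pfZF  = false;
    }
}
#endif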
7471
7472
7473FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7474{
7475 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7476 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7477
7478 switch (pVCpu->iem.s.enmEffOpSize)
7479 {
7480 case IEMMODE_16BIT:
7481 IEM_MC_BEGIN(5, 1);
7482 IEM_MC_ARG(uint16_t, uSel, 0);
7483 IEM_MC_ARG(uint16_t, offSeg, 1);
7484 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7485 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7486 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7487 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7490 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7491 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7492 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7493 IEM_MC_END();
7494 return VINF_SUCCESS;
7495
7496 case IEMMODE_32BIT:
7497 IEM_MC_BEGIN(5, 1);
7498 IEM_MC_ARG(uint16_t, uSel, 0);
7499 IEM_MC_ARG(uint32_t, offSeg, 1);
7500 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7501 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7502 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7503 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7506 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7507 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7508 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7509 IEM_MC_END();
7510 return VINF_SUCCESS;
7511
7512 case IEMMODE_64BIT:
7513 IEM_MC_BEGIN(5, 1);
7514 IEM_MC_ARG(uint16_t, uSel, 0);
7515 IEM_MC_ARG(uint64_t, offSeg, 1);
7516 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7517 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7518 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7519 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7522 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7523 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7524 else
7525 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7526 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7527 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7528 IEM_MC_END();
7529 return VINF_SUCCESS;
7530
7531 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7532 }
7533}
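
/* Note how the three cases above read the far pointer offset-first and the
   selector after it (at displacement 2, 4 or 8). A guarded-out sketch of the
   16:32 layout read by the 32-bit case; the struct is purely illustrative: */
#if 0
# include <stdint.h>
# pragma pack(1)
typedef struct EXAMPLEFARPTR1632
{
    uint32_t off;   /* IEM_MC_FETCH_MEM_U32 at displacement 0 */
    uint16_t sel;   /* IEM_MC_FETCH_MEM_U16_DISP at displacement 4 */
} EXAMPLEFARPTR1632;
# pragma pack()
#endif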
7534
7535
7536/** Opcode 0x0f 0xb2. */
7537FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7538{
7539 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7540 IEMOP_HLP_MIN_386();
7541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7542 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7543 return IEMOP_RAISE_INVALID_OPCODE();
7544 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7545}
7546
7547
7548/** Opcode 0x0f 0xb3. */
7549FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7550{
7551 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7552 IEMOP_HLP_MIN_386();
7553 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7554}
7555
7556
7557/** Opcode 0x0f 0xb4. */
7558FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7559{
7560 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7561 IEMOP_HLP_MIN_386();
7562 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7564 return IEMOP_RAISE_INVALID_OPCODE();
7565 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7566}
7567
7568
7569/** Opcode 0x0f 0xb5. */
7570FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7571{
7572 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7573 IEMOP_HLP_MIN_386();
7574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7576 return IEMOP_RAISE_INVALID_OPCODE();
7577 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7578}
7579
7580
7581/** Opcode 0x0f 0xb6. */
7582FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7583{
7584 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7585 IEMOP_HLP_MIN_386();
7586
7587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7588
7589 /*
7590 * If rm is denoting a register, no more instruction bytes.
7591 */
7592 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7593 {
7594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7595 switch (pVCpu->iem.s.enmEffOpSize)
7596 {
7597 case IEMMODE_16BIT:
7598 IEM_MC_BEGIN(0, 1);
7599 IEM_MC_LOCAL(uint16_t, u16Value);
7600 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7601 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7602 IEM_MC_ADVANCE_RIP();
7603 IEM_MC_END();
7604 return VINF_SUCCESS;
7605
7606 case IEMMODE_32BIT:
7607 IEM_MC_BEGIN(0, 1);
7608 IEM_MC_LOCAL(uint32_t, u32Value);
7609 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7610 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7611 IEM_MC_ADVANCE_RIP();
7612 IEM_MC_END();
7613 return VINF_SUCCESS;
7614
7615 case IEMMODE_64BIT:
7616 IEM_MC_BEGIN(0, 1);
7617 IEM_MC_LOCAL(uint64_t, u64Value);
7618 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7619 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7620 IEM_MC_ADVANCE_RIP();
7621 IEM_MC_END();
7622 return VINF_SUCCESS;
7623
7624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7625 }
7626 }
7627 else
7628 {
7629 /*
7630 * We're loading a register from memory.
7631 */
7632 switch (pVCpu->iem.s.enmEffOpSize)
7633 {
7634 case IEMMODE_16BIT:
7635 IEM_MC_BEGIN(0, 2);
7636 IEM_MC_LOCAL(uint16_t, u16Value);
7637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7640 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7641 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7642 IEM_MC_ADVANCE_RIP();
7643 IEM_MC_END();
7644 return VINF_SUCCESS;
7645
7646 case IEMMODE_32BIT:
7647 IEM_MC_BEGIN(0, 2);
7648 IEM_MC_LOCAL(uint32_t, u32Value);
7649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7652 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7653 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7654 IEM_MC_ADVANCE_RIP();
7655 IEM_MC_END();
7656 return VINF_SUCCESS;
7657
7658 case IEMMODE_64BIT:
7659 IEM_MC_BEGIN(0, 2);
7660 IEM_MC_LOCAL(uint64_t, u64Value);
7661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7664 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7665 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7666 IEM_MC_ADVANCE_RIP();
7667 IEM_MC_END();
7668 return VINF_SUCCESS;
7669
7670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7671 }
7672 }
7673}
7674
7675
7676/** Opcode 0x0f 0xb7. */
7677FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7678{
7679 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7680 IEMOP_HLP_MIN_386();
7681
7682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7683
7684 /** @todo Not entirely sure how the operand size prefix is handled here,
7685 * assuming that it will be ignored. It would be nice to have a few
7686 * tests for this. */
7687 /*
7688 * If rm is denoting a register, no more instruction bytes.
7689 */
7690 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7691 {
7692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7693 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7694 {
7695 IEM_MC_BEGIN(0, 1);
7696 IEM_MC_LOCAL(uint32_t, u32Value);
7697 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7698 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7699 IEM_MC_ADVANCE_RIP();
7700 IEM_MC_END();
7701 }
7702 else
7703 {
7704 IEM_MC_BEGIN(0, 1);
7705 IEM_MC_LOCAL(uint64_t, u64Value);
7706 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7707 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7708 IEM_MC_ADVANCE_RIP();
7709 IEM_MC_END();
7710 }
7711 }
7712 else
7713 {
7714 /*
7715 * We're loading a register from memory.
7716 */
7717 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7718 {
7719 IEM_MC_BEGIN(0, 2);
7720 IEM_MC_LOCAL(uint32_t, u32Value);
7721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7724 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7725 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7726 IEM_MC_ADVANCE_RIP();
7727 IEM_MC_END();
7728 }
7729 else
7730 {
7731 IEM_MC_BEGIN(0, 2);
7732 IEM_MC_LOCAL(uint64_t, u64Value);
7733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7736 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7737 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7738 IEM_MC_ADVANCE_RIP();
7739 IEM_MC_END();
7740 }
7741 }
7742 return VINF_SUCCESS;
7743}
7744
7745
7746/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7747FNIEMOP_UD_STUB(iemOp_jmpe);
7748/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7749FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7750
7751
7752/**
7753 * @opcode 0xb9
7754 * @opinvalid intel-modrm
7755 * @optest ->
7756 */
7757FNIEMOP_DEF(iemOp_Grp10)
7758{
7759 /*
7760 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7761 * ModR/M byte too. See bs3-cpu-decoder-1.c32, so we can forward to iemOp_InvalidNeedRM.
7762 */
7763 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7764 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7765 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7766}
7767
7768
7769/** Opcode 0x0f 0xba. */
7770FNIEMOP_DEF(iemOp_Grp8)
7771{
7772 IEMOP_HLP_MIN_386();
7773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7774 PCIEMOPBINSIZES pImpl;
7775 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7776 {
7777 case 0: case 1: case 2: case 3:
7778 /* Both AMD and Intel want full modr/m decoding and imm8. */
7779 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7780 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7781 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7782 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7783 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7785 }
7786 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7787
7788 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7789 {
7790 /* register destination. */
7791 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7793
7794 switch (pVCpu->iem.s.enmEffOpSize)
7795 {
7796 case IEMMODE_16BIT:
7797 IEM_MC_BEGIN(3, 0);
7798 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7799 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7800 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7801
7802 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7803 IEM_MC_REF_EFLAGS(pEFlags);
7804 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7805
7806 IEM_MC_ADVANCE_RIP();
7807 IEM_MC_END();
7808 return VINF_SUCCESS;
7809
7810 case IEMMODE_32BIT:
7811 IEM_MC_BEGIN(3, 0);
7812 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7813 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7814 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7815
7816 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7817 IEM_MC_REF_EFLAGS(pEFlags);
7818 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7819
7820 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7821 IEM_MC_ADVANCE_RIP();
7822 IEM_MC_END();
7823 return VINF_SUCCESS;
7824
7825 case IEMMODE_64BIT:
7826 IEM_MC_BEGIN(3, 0);
7827 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7828 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7829 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7830
7831 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7832 IEM_MC_REF_EFLAGS(pEFlags);
7833 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7834
7835 IEM_MC_ADVANCE_RIP();
7836 IEM_MC_END();
7837 return VINF_SUCCESS;
7838
7839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7840 }
7841 }
7842 else
7843 {
7844 /* memory destination. */
7845
7846 uint32_t fAccess;
7847 if (pImpl->pfnLockedU16)
7848 fAccess = IEM_ACCESS_DATA_RW;
7849 else /* BT */
7850 fAccess = IEM_ACCESS_DATA_R;
7851
7852 /** @todo test negative bit offsets! */
7853 switch (pVCpu->iem.s.enmEffOpSize)
7854 {
7855 case IEMMODE_16BIT:
7856 IEM_MC_BEGIN(3, 1);
7857 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7858 IEM_MC_ARG(uint16_t, u16Src, 1);
7859 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7861
7862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7863 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7864 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7865 if (pImpl->pfnLockedU16)
7866 IEMOP_HLP_DONE_DECODING();
7867 else
7868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7869 IEM_MC_FETCH_EFLAGS(EFlags);
7870 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7871 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7872 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7873 else
7874 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7875 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7876
7877 IEM_MC_COMMIT_EFLAGS(EFlags);
7878 IEM_MC_ADVANCE_RIP();
7879 IEM_MC_END();
7880 return VINF_SUCCESS;
7881
7882 case IEMMODE_32BIT:
7883 IEM_MC_BEGIN(3, 1);
7884 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7885 IEM_MC_ARG(uint32_t, u32Src, 1);
7886 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7888
7889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7890 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7891 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7892 if (pImpl->pfnLockedU16)
7893 IEMOP_HLP_DONE_DECODING();
7894 else
7895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7896 IEM_MC_FETCH_EFLAGS(EFlags);
7897 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7898 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7899 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7900 else
7901 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7903
7904 IEM_MC_COMMIT_EFLAGS(EFlags);
7905 IEM_MC_ADVANCE_RIP();
7906 IEM_MC_END();
7907 return VINF_SUCCESS;
7908
7909 case IEMMODE_64BIT:
7910 IEM_MC_BEGIN(3, 1);
7911 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7912 IEM_MC_ARG(uint64_t, u64Src, 1);
7913 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7915
7916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7917 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7918 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7919 if (pImpl->pfnLockedU16)
7920 IEMOP_HLP_DONE_DECODING();
7921 else
7922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7923 IEM_MC_FETCH_EFLAGS(EFlags);
7924 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7925 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7926 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7927 else
7928 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7929 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7930
7931 IEM_MC_COMMIT_EFLAGS(EFlags);
7932 IEM_MC_ADVANCE_RIP();
7933 IEM_MC_END();
7934 return VINF_SUCCESS;
7935
7936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7937 }
7938 }
7939}
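
/* In the register forms above the imm8 bit offset is reduced modulo the
   operand width (u8Bit & 0x0f/0x1f/0x3f), whereas the memory forms can reach
   bits outside the operand only via the effective address. A guarded-out
   sketch of the register-form bt core (helper name illustrative; the real
   workers also write CF via the EFLAGS pointer): */
#if 0
# include <stdint.h>
# include <stdbool.h>

static bool iemExampleBtRegU32(uint32_t uDst, uint8_t u8Bit)
{
    return ((uDst >> (u8Bit & 0x1f)) & 1) != 0; /* the selected bit -> CF */
}
#endif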
7940
7941
7942/** Opcode 0x0f 0xbb. */
7943FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7944{
7945 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7946 IEMOP_HLP_MIN_386();
7947 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7948}
7949
7950
7951/** Opcode 0x0f 0xbc. */
7952FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7953{
7954 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7955 IEMOP_HLP_MIN_386();
7956 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7957 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7958}
7959
7960
7961/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7962FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7963
7964
7965/** Opcode 0x0f 0xbd. */
7966FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7967{
7968 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7969 IEMOP_HLP_MIN_386();
7970 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7971 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7972}
7973
7974
7975/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7976FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7977
7978
7979/** Opcode 0x0f 0xbe. */
7980FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7981{
7982 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7983 IEMOP_HLP_MIN_386();
7984
7985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7986
7987 /*
7988 * If rm is denoting a register, no more instruction bytes.
7989 */
7990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7991 {
7992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7993 switch (pVCpu->iem.s.enmEffOpSize)
7994 {
7995 case IEMMODE_16BIT:
7996 IEM_MC_BEGIN(0, 1);
7997 IEM_MC_LOCAL(uint16_t, u16Value);
7998 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7999 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8000 IEM_MC_ADVANCE_RIP();
8001 IEM_MC_END();
8002 return VINF_SUCCESS;
8003
8004 case IEMMODE_32BIT:
8005 IEM_MC_BEGIN(0, 1);
8006 IEM_MC_LOCAL(uint32_t, u32Value);
8007 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8008 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8009 IEM_MC_ADVANCE_RIP();
8010 IEM_MC_END();
8011 return VINF_SUCCESS;
8012
8013 case IEMMODE_64BIT:
8014 IEM_MC_BEGIN(0, 1);
8015 IEM_MC_LOCAL(uint64_t, u64Value);
8016 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8017 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8018 IEM_MC_ADVANCE_RIP();
8019 IEM_MC_END();
8020 return VINF_SUCCESS;
8021
8022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8023 }
8024 }
8025 else
8026 {
8027 /*
8028 * We're loading a register from memory.
8029 */
8030 switch (pVCpu->iem.s.enmEffOpSize)
8031 {
8032 case IEMMODE_16BIT:
8033 IEM_MC_BEGIN(0, 2);
8034 IEM_MC_LOCAL(uint16_t, u16Value);
8035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8038 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8039 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
8040 IEM_MC_ADVANCE_RIP();
8041 IEM_MC_END();
8042 return VINF_SUCCESS;
8043
8044 case IEMMODE_32BIT:
8045 IEM_MC_BEGIN(0, 2);
8046 IEM_MC_LOCAL(uint32_t, u32Value);
8047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8050 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8051 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8052 IEM_MC_ADVANCE_RIP();
8053 IEM_MC_END();
8054 return VINF_SUCCESS;
8055
8056 case IEMMODE_64BIT:
8057 IEM_MC_BEGIN(0, 2);
8058 IEM_MC_LOCAL(uint64_t, u64Value);
8059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8062 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8063 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8064 IEM_MC_ADVANCE_RIP();
8065 IEM_MC_END();
8066 return VINF_SUCCESS;
8067
8068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8069 }
8070 }
8071}
8072
8073
8074/** Opcode 0x0f 0xbf. */
8075FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
8076{
8077 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
8078 IEMOP_HLP_MIN_386();
8079
8080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8081
8082 /** @todo Not entirely sure how the operand size prefix is handled here,
8083 * assuming that it will be ignored. It would be nice to have a few
8084 * tests for this. */
8085 /*
8086 * If rm is denoting a register, no more instruction bytes.
8087 */
8088 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8089 {
8090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8091 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8092 {
8093 IEM_MC_BEGIN(0, 1);
8094 IEM_MC_LOCAL(uint32_t, u32Value);
8095 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8096 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8097 IEM_MC_ADVANCE_RIP();
8098 IEM_MC_END();
8099 }
8100 else
8101 {
8102 IEM_MC_BEGIN(0, 1);
8103 IEM_MC_LOCAL(uint64_t, u64Value);
8104 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8105 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8106 IEM_MC_ADVANCE_RIP();
8107 IEM_MC_END();
8108 }
8109 }
8110 else
8111 {
8112 /*
8113 * We're loading a register from memory.
8114 */
8115 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8116 {
8117 IEM_MC_BEGIN(0, 2);
8118 IEM_MC_LOCAL(uint32_t, u32Value);
8119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8123 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
8124 IEM_MC_ADVANCE_RIP();
8125 IEM_MC_END();
8126 }
8127 else
8128 {
8129 IEM_MC_BEGIN(0, 2);
8130 IEM_MC_LOCAL(uint64_t, u64Value);
8131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8134 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8135 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8136 IEM_MC_ADVANCE_RIP();
8137 IEM_MC_END();
8138 }
8139 }
8140 return VINF_SUCCESS;
8141}
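
/* The _SX_ fetch variants used above amount to plain C sign extension; a
   guarded-out one-liner equivalent for the 16-to-64-bit case (the helper
   name is illustrative): */
#if 0
# include <stdint.h>

static uint64_t iemExampleMovSxU16ToU64(uint16_t u16Value)
{
    return (uint64_t)(int64_t)(int16_t)u16Value;
}
#endif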
8142
8143
8144/** Opcode 0x0f 0xc0. */
8145FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
8146{
8147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8148 IEMOP_HLP_MIN_486();
8149 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
8150
8151 /*
8152 * If rm is denoting a register, no more instruction bytes.
8153 */
8154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8155 {
8156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8157
8158 IEM_MC_BEGIN(3, 0);
8159 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8160 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8161 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8162
8163 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8164 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8165 IEM_MC_REF_EFLAGS(pEFlags);
8166 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8167
8168 IEM_MC_ADVANCE_RIP();
8169 IEM_MC_END();
8170 }
8171 else
8172 {
8173 /*
8174 * We're accessing memory.
8175 */
8176 IEM_MC_BEGIN(3, 3);
8177 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8178 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
8179 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8180 IEM_MC_LOCAL(uint8_t, u8RegCopy);
8181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8182
8183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8184 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8185 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8186 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
8187 IEM_MC_FETCH_EFLAGS(EFlags);
8188 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8189 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
8190 else
8191 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
8192
8193 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8194 IEM_MC_COMMIT_EFLAGS(EFlags);
8195 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
8196 IEM_MC_ADVANCE_RIP();
8197 IEM_MC_END();
8198 return VINF_SUCCESS;
8199 }
8200 return VINF_SUCCESS;
8201}
8202
8203
8204/** Opcode 0x0f 0xc1. */
8205FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
8206{
8207 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
8208 IEMOP_HLP_MIN_486();
8209 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8210
8211 /*
8212 * If rm is denoting a register, no more instruction bytes.
8213 */
8214 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8215 {
8216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8217
8218 switch (pVCpu->iem.s.enmEffOpSize)
8219 {
8220 case IEMMODE_16BIT:
8221 IEM_MC_BEGIN(3, 0);
8222 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8223 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8224 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8225
8226 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8227 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8228 IEM_MC_REF_EFLAGS(pEFlags);
8229 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8230
8231 IEM_MC_ADVANCE_RIP();
8232 IEM_MC_END();
8233 return VINF_SUCCESS;
8234
8235 case IEMMODE_32BIT:
8236 IEM_MC_BEGIN(3, 0);
8237 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8238 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8239 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8240
8241 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8242 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8243 IEM_MC_REF_EFLAGS(pEFlags);
8244 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8245
8246 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8247 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
8248 IEM_MC_ADVANCE_RIP();
8249 IEM_MC_END();
8250 return VINF_SUCCESS;
8251
8252 case IEMMODE_64BIT:
8253 IEM_MC_BEGIN(3, 0);
8254 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8255 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8256 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8257
8258 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8259 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8260 IEM_MC_REF_EFLAGS(pEFlags);
8261 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8262
8263 IEM_MC_ADVANCE_RIP();
8264 IEM_MC_END();
8265 return VINF_SUCCESS;
8266
8267 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8268 }
8269 }
8270 else
8271 {
8272 /*
8273 * We're accessing memory.
8274 */
8275 switch (pVCpu->iem.s.enmEffOpSize)
8276 {
8277 case IEMMODE_16BIT:
8278 IEM_MC_BEGIN(3, 3);
8279 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8280 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
8281 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8282 IEM_MC_LOCAL(uint16_t, u16RegCopy);
8283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8284
8285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8286 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8287 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8288 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
8289 IEM_MC_FETCH_EFLAGS(EFlags);
8290 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8291 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
8292 else
8293 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
8294
8295 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8296 IEM_MC_COMMIT_EFLAGS(EFlags);
8297 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
8298 IEM_MC_ADVANCE_RIP();
8299 IEM_MC_END();
8300 return VINF_SUCCESS;
8301
8302 case IEMMODE_32BIT:
8303 IEM_MC_BEGIN(3, 3);
8304 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8305 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8306 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8307 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8309
8310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8311 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8312 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8313 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8314 IEM_MC_FETCH_EFLAGS(EFlags);
8315 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8316 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8317 else
8318 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8319
8320 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8321 IEM_MC_COMMIT_EFLAGS(EFlags);
8322 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8323 IEM_MC_ADVANCE_RIP();
8324 IEM_MC_END();
8325 return VINF_SUCCESS;
8326
8327 case IEMMODE_64BIT:
8328 IEM_MC_BEGIN(3, 3);
8329 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8330 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8331 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8332 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8334
8335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8336 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8337 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8338 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8339 IEM_MC_FETCH_EFLAGS(EFlags);
8340 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8341 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8342 else
8343 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8344
8345 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8346 IEM_MC_COMMIT_EFLAGS(EFlags);
8347 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8348 IEM_MC_ADVANCE_RIP();
8349 IEM_MC_END();
8350 return VINF_SUCCESS;
8351
8352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8353 }
8354 }
8355}
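
/* xadd stores the sum in the destination and the old destination value in
   the source register - which is why the memory forms above take a local
   copy of the register and store it back after committing the mapping. A
   guarded-out sketch of the core operation (helper name illustrative, flags
   omitted): */
#if 0
# include <stdint.h>

static void iemExampleXAddU32(uint32_t *puDst, uint32_t *puReg)
{
    uint32_t const uOld = *puDst;
    *puDst = uOld + *puReg;
    *puReg = uOld;
}
#endif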
8356
8357
8358/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8359FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8360/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8361FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8362/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8363FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8364/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8365FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8366
8367
8368/** Opcode 0x0f 0xc3. */
8369FNIEMOP_DEF(iemOp_movnti_My_Gy)
8370{
8371 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8372
8373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8374
8375 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8376 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8377 {
8378 switch (pVCpu->iem.s.enmEffOpSize)
8379 {
8380 case IEMMODE_32BIT:
8381 IEM_MC_BEGIN(0, 2);
8382 IEM_MC_LOCAL(uint32_t, u32Value);
8383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8384
8385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8387 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8388 return IEMOP_RAISE_INVALID_OPCODE();
8389
8390 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8391 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8392 IEM_MC_ADVANCE_RIP();
8393 IEM_MC_END();
8394 break;
8395
8396 case IEMMODE_64BIT:
8397 IEM_MC_BEGIN(0, 2);
8398 IEM_MC_LOCAL(uint64_t, u64Value);
8399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8400
8401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8403 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8404 return IEMOP_RAISE_INVALID_OPCODE();
8405
8406 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8407 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8408 IEM_MC_ADVANCE_RIP();
8409 IEM_MC_END();
8410 break;
8411
8412 case IEMMODE_16BIT:
8413 /** @todo check this form. */
8414 return IEMOP_RAISE_INVALID_OPCODE();
8415 }
8416 }
8417 else
8418 return IEMOP_RAISE_INVALID_OPCODE();
8419 return VINF_SUCCESS;
8420}
8421/* Opcode 0x66 0x0f 0xc3 - invalid */
8422/* Opcode 0xf3 0x0f 0xc3 - invalid */
8423/* Opcode 0xf2 0x0f 0xc3 - invalid */
8424
8425/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8426FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8427/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8428FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8429/* Opcode 0xf3 0x0f 0xc4 - invalid */
8430/* Opcode 0xf2 0x0f 0xc4 - invalid */
8431
8432/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8433FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8434/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8435FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8436/* Opcode 0xf3 0x0f 0xc5 - invalid */
8437/* Opcode 0xf2 0x0f 0xc5 - invalid */
8438
8439/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8440FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8441/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8442FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8443/* Opcode 0xf3 0x0f 0xc6 - invalid */
8444/* Opcode 0xf2 0x0f 0xc6 - invalid */
8445
8446
8447/** Opcode 0x0f 0xc7 !11/1. */
8448FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8449{
8450 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8451
8452 IEM_MC_BEGIN(4, 3);
8453 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8454 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8455 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8456 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8457 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8458 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8460
8461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8462 IEMOP_HLP_DONE_DECODING();
8463 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8464
8465 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8466 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8467 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8468
8469 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8470 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8471 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8472
8473 IEM_MC_FETCH_EFLAGS(EFlags);
8474 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8475 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8476 else
8477 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8478
8479 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8480 IEM_MC_COMMIT_EFLAGS(EFlags);
8481 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8482 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8483 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8484 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8485 IEM_MC_ENDIF();
8486 IEM_MC_ADVANCE_RIP();
8487
8488 IEM_MC_END();
8489 return VINF_SUCCESS;
8490}
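
/* The two RTUINT64U locals above assemble the EDX:EAX comparand and ECX:EBX
   replacement value from their 32-bit register halves. A guarded-out sketch
   of the architectural cmpxchg8b operation (helper name illustrative, EFLAGS
   reduced to ZF): */
#if 0
# include <stdint.h>
# include <stdbool.h>

static void iemExampleCmpXchg8b(uint64_t *pu64Dst, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx, bool *pfZF)
{
    if (*pu64Dst == *pu64EaxEdx)
    {
        *pu64Dst = u64EbxEcx;
        *pfZF = true;
    }
    else
    {
        *pu64EaxEdx = *pu64Dst; /* EDX:EAX receives the current memory value,
                                   matching the IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                                   store-back above */
        *pfZF = false;
    }
}
#endif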
8491
8492
8493/** Opcode REX.W 0x0f 0xc7 !11/1. */
8494FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8495{
8496 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8497 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8498 {
8499#if 0
8500 RT_NOREF(bRm);
8501 IEMOP_BITCH_ABOUT_STUB();
8502 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8503#else
8504 IEM_MC_BEGIN(4, 3);
8505 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8506 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8507 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8508 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8509 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8510 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8512
8513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8514 IEMOP_HLP_DONE_DECODING();
8515 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8516 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8517
8518 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8519 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8520 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8521
8522 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8523 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8524 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8525
8526 IEM_MC_FETCH_EFLAGS(EFlags);
8527# ifdef RT_ARCH_AMD64
8528 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8529 {
8530 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8531 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8532 else
8533 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8534 }
8535 else
8536# endif
8537 {
8538 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8539 accesses that are not atomic as a whole, which works fine in a uni-CPU
8540 guest configuration (ignoring DMA). If guest SMP is active we have no
8541 choice but to use a rendezvous callback here. Sigh. */
8542 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8543 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8544 else
8545 {
8546 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8547 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8548 }
8549 }
8550
8551 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8552 IEM_MC_COMMIT_EFLAGS(EFlags);
8553 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8554 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8555 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8556 IEM_MC_ENDIF();
8557 IEM_MC_ADVANCE_RIP();
8558
8559 IEM_MC_END();
8560 return VINF_SUCCESS;
8561#endif
8562 }
8563 Log(("cmpxchg16b -> #UD\n"));
8564 return IEMOP_RAISE_INVALID_OPCODE();
8565}
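
/* Unlike cmpxchg8b, cmpxchg16b requires a 16-byte aligned operand; the
   IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED above raises #GP(0) otherwise. A
   guarded-out sketch of that alignment test (helper name illustrative): */
#if 0
# include <stdint.h>
# include <stdbool.h>

static bool iemExampleIsCmpXchg16bAligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0;    /* misaligned operand -> #GP(0) */
}
#endif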
8566
8567FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8568{
8569 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8570 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8571 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8572}
8573
8574/** Opcode 0x0f 0xc7 11/6. */
8575FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8576
8577/** Opcode 0x0f 0xc7 !11/6. */
8578#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8579FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
8580{
8581 IEMOP_MNEMONIC(vmptrld, "vmptrld");
8582 IEMOP_HLP_IN_VMX_OPERATION();
8583 IEMOP_HLP_VMX_INSTR();
8584 IEM_MC_BEGIN(2, 0);
8585 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8586 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8588 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8589 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8590 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
8591 IEM_MC_END();
8592 return VINF_SUCCESS;
8593}
8594#else
8595FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8596#endif
8597
8598/** Opcode 0x66 0x0f 0xc7 !11/6. */
8599#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8600FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
8601{
8602 IEMOP_MNEMONIC(vmclear, "vmclear");
8603 IEMOP_HLP_IN_VMX_OPERATION();
8604 IEMOP_HLP_VMX_INSTR();
8605 IEM_MC_BEGIN(2, 0);
8606 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8607 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8609 IEMOP_HLP_DONE_DECODING();
8610 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8611 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
8612 IEM_MC_END();
8613 return VINF_SUCCESS;
8614}
8615#else
8616FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8617#endif
8618
8619/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8620#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8621FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
8622{
8623 IEMOP_MNEMONIC(vmxon, "vmxon");
8624 IEMOP_HLP_VMX_INSTR();
8625 IEM_MC_BEGIN(2, 0);
8626 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8627 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
8628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8629 IEMOP_HLP_DONE_DECODING();
8630 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8631 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
8632 IEM_MC_END();
8633 return VINF_SUCCESS;
8634}
8635#else
8636FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8637#endif
8638
8639/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8640#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
8641FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
8642{
8643 IEMOP_MNEMONIC(vmptrst, "vmptrst");
8644 IEMOP_HLP_IN_VMX_OPERATION();
8645 IEMOP_HLP_VMX_INSTR();
8646 IEM_MC_BEGIN(2, 0);
8647 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8648 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
8649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8650 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
8651 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8652 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
8653 IEM_MC_END();
8654 return VINF_SUCCESS;
8655}
8656#else
8657FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8658#endif
8659
8660/** Opcode 0x0f 0xc7 11/7. */
8661FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8662
8663
8664/**
8665 * Group 9 jump table for register variant.
8666 */
8667IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8668{ /* pfx: none, 066h, 0f3h, 0f2h */
8669 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8670 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8671 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8672 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8673 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8674 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8675 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8676 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8677};
8678AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8679
8680
8681/**
8682 * Group 9 jump table for memory variant.
8683 */
8684IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8685{ /* pfx: none, 066h, 0f3h, 0f2h */
8686 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8687 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8688 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8689 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8690 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8691 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8692 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8693 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8694};
8695AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8696
8697
8698/** Opcode 0x0f 0xc7. */
8699FNIEMOP_DEF(iemOp_Grp9)
8700{
8701 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
8702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8703 /* register, register */
8704 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8705 + pVCpu->iem.s.idxPrefix], bRm);
8706 /* memory, register */
8707 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8708 + pVCpu->iem.s.idxPrefix], bRm);
8709}
8710
8711
8712/**
8713 * Common 'bswap register' helper.
8714 */
8715FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8716{
8717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8718 switch (pVCpu->iem.s.enmEffOpSize)
8719 {
8720 case IEMMODE_16BIT:
8721 IEM_MC_BEGIN(1, 0);
8722 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8723 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8724 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8725 IEM_MC_ADVANCE_RIP();
8726 IEM_MC_END();
8727 return VINF_SUCCESS;
8728
8729 case IEMMODE_32BIT:
8730 IEM_MC_BEGIN(1, 0);
8731 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8732 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8733 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8734 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8735 IEM_MC_ADVANCE_RIP();
8736 IEM_MC_END();
8737 return VINF_SUCCESS;
8738
8739 case IEMMODE_64BIT:
8740 IEM_MC_BEGIN(1, 0);
8741 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8742 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8743 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8744 IEM_MC_ADVANCE_RIP();
8745 IEM_MC_END();
8746 return VINF_SUCCESS;
8747
8748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8749 }
8750}
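
/* Note: only 32-bit operations implicitly zero the high dword of a 64-bit
   GREG; that is why the IEMMODE_32BIT case above clears it explicitly via
   IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF, while the IEMMODE_16BIT case must
   leave it untouched. BSWAP with a 16-bit operand is documented as
   undefined, which is why that case gets its own iemAImpl_bswap_u16 worker
   rather than reusing the 32-bit one. */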
8751
8752
8753/** Opcode 0x0f 0xc8. */
8754FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8755{
8756 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
 8757 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
 8758 REX.X prefix, but it appears REX.B is actually the correct one. For a
 8759 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8760 IEMOP_HLP_MIN_486();
8761 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8762}
8763
8764
8765/** Opcode 0x0f 0xc9. */
8766FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8767{
8768 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8769 IEMOP_HLP_MIN_486();
8770 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8771}
8772
8773
8774/** Opcode 0x0f 0xca. */
8775FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8776{
 8777 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8778 IEMOP_HLP_MIN_486();
8779 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8780}
8781
8782
8783/** Opcode 0x0f 0xcb. */
8784FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8785{
 8786 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8787 IEMOP_HLP_MIN_486();
8788 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8789}
8790
8791
8792/** Opcode 0x0f 0xcc. */
8793FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8794{
8795 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8796 IEMOP_HLP_MIN_486();
8797 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8798}
8799
8800
8801/** Opcode 0x0f 0xcd. */
8802FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8803{
8804 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8805 IEMOP_HLP_MIN_486();
8806 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8807}
8808
8809
8810/** Opcode 0x0f 0xce. */
8811FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8812{
8813 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8814 IEMOP_HLP_MIN_486();
8815 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8816}
8817
8818
8819/** Opcode 0x0f 0xcf. */
8820FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8821{
8822 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8823 IEMOP_HLP_MIN_486();
8824 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8825}
8826
8827
8828/* Opcode 0x0f 0xd0 - invalid */
8829/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8830FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8831/* Opcode 0xf3 0x0f 0xd0 - invalid */
8832/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8833FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8834
8835/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8836FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8837/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8838FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8839/* Opcode 0xf3 0x0f 0xd1 - invalid */
8840/* Opcode 0xf2 0x0f 0xd1 - invalid */
8841
8842/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8843FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8844/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8845FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8846/* Opcode 0xf3 0x0f 0xd2 - invalid */
8847/* Opcode 0xf2 0x0f 0xd2 - invalid */
8848
8849/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8850FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8851/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8852FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8853/* Opcode 0xf3 0x0f 0xd3 - invalid */
8854/* Opcode 0xf2 0x0f 0xd3 - invalid */
8855
8856/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8857FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8858/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8859FNIEMOP_STUB(iemOp_paddq_Vx_W);
8860/* Opcode 0xf3 0x0f 0xd4 - invalid */
8861/* Opcode 0xf2 0x0f 0xd4 - invalid */
8862
8863/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8864FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8865/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8866FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8867/* Opcode 0xf3 0x0f 0xd5 - invalid */
8868/* Opcode 0xf2 0x0f 0xd5 - invalid */
8869
8870/* Opcode 0x0f 0xd6 - invalid */
8871
8872/**
8873 * @opcode 0xd6
8874 * @oppfx 0x66
8875 * @opcpuid sse2
8876 * @opgroup og_sse2_pcksclr_datamove
8877 * @opxcpttype none
8878 * @optest op1=-1 op2=2 -> op1=2
8879 * @optest op1=0 op2=-42 -> op1=-42
8880 */
8881FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8882{
8883 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8885 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8886 {
8887 /*
8888 * Register, register.
8889 */
8890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8891 IEM_MC_BEGIN(0, 2);
8892 IEM_MC_LOCAL(uint64_t, uSrc);
8893
8894 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8895 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8896
8897 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8898 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8899
8900 IEM_MC_ADVANCE_RIP();
8901 IEM_MC_END();
8902 }
8903 else
8904 {
8905 /*
8906 * Memory, register.
8907 */
8908 IEM_MC_BEGIN(0, 2);
8909 IEM_MC_LOCAL(uint64_t, uSrc);
8910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8911
8912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8916
8917 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8918 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8919
8920 IEM_MC_ADVANCE_RIP();
8921 IEM_MC_END();
8922 }
8923 return VINF_SUCCESS;
8924}
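
/* Note: the register form above zero-extends the result into the full
   destination register (IEM_MC_STORE_XREG_U64_ZX_U128 clears bits 127:64),
   whereas the memory form stores only the low quadword. */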
8925
8926
8927/**
8928 * @opcode 0xd6
8929 * @opcodesub 11 mr/reg
8930 * @oppfx f3
8931 * @opcpuid sse2
8932 * @opgroup og_sse2_simdint_datamove
8933 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8934 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8935 */
8936FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8937{
8938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8940 {
8941 /*
8942 * Register, register.
8943 */
8944 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8946 IEM_MC_BEGIN(0, 1);
8947 IEM_MC_LOCAL(uint64_t, uSrc);
8948
8949 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8950 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8951
8952 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8953 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8954 IEM_MC_FPU_TO_MMX_MODE();
8955
8956 IEM_MC_ADVANCE_RIP();
8957 IEM_MC_END();
8958 return VINF_SUCCESS;
8959 }
8960
8961 /**
8962 * @opdone
8963 * @opmnemonic udf30fd6mem
8964 * @opcode 0xd6
8965 * @opcodesub !11 mr/reg
8966 * @oppfx f3
8967 * @opunused intel-modrm
8968 * @opcpuid sse
8969 * @optest ->
8970 */
8971 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8972}
8973
8974
8975/**
8976 * @opcode 0xd6
8977 * @opcodesub 11 mr/reg
8978 * @oppfx f2
8979 * @opcpuid sse2
8980 * @opgroup og_sse2_simdint_datamove
8981 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8982 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8983 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8984 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8985 * @optest op1=-42 op2=0xfedcba9876543210
8986 * -> op1=0xfedcba9876543210 ftw=0xff
8987 */
8988FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8989{
8990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8992 {
8993 /*
8994 * Register, register.
8995 */
8996 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8998 IEM_MC_BEGIN(0, 1);
8999 IEM_MC_LOCAL(uint64_t, uSrc);
9000
9001 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9002 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9003
9004 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9005 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
9006 IEM_MC_FPU_TO_MMX_MODE();
9007
9008 IEM_MC_ADVANCE_RIP();
9009 IEM_MC_END();
9010 return VINF_SUCCESS;
9011 }
9012
9013 /**
9014 * @opdone
9015 * @opmnemonic udf20fd6mem
9016 * @opcode 0xd6
9017 * @opcodesub !11 mr/reg
9018 * @oppfx f2
9019 * @opunused intel-modrm
9020 * @opcpuid sse
9021 * @optest ->
9022 */
9023 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
9024}
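
/* Note: both MOVQ2DQ and MOVDQ2Q above end with IEM_MC_FPU_TO_MMX_MODE,
   i.e. touching an MMX register puts the FPU into MMX mode with an
   all-valid tag word; that is what the 'ftw=0xff' assertions in the
   @optest lines above check for. */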
9025
9026/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
9027FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
9028{
 9029 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
 9030 /** @todo testcase: Check that the instruction implicitly clears the high
 9031 * bits in 64-bit mode. The REX.W prefix only becomes necessary once
 9032 * VLMAX > 256 and opcode modifications are made to work with the whole
 9033 * width (not just 128). */
 9034 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
 9035 /* Docs say register only. */
9036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9037 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9038 {
9039 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
9040 IEM_MC_BEGIN(2, 0);
9041 IEM_MC_ARG(uint64_t *, pDst, 0);
9042 IEM_MC_ARG(uint64_t const *, pSrc, 1);
9043 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9044 IEM_MC_PREPARE_FPU_USAGE();
9045 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9046 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
9047 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
9048 IEM_MC_ADVANCE_RIP();
9049 IEM_MC_END();
9050 return VINF_SUCCESS;
9051 }
9052 return IEMOP_RAISE_INVALID_OPCODE();
9053}
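
/*
 * A minimal sketch of what a pmovmskb worker like iemAImpl_pmovmskb_u64
 * computes (illustrative only; the real worker is the AImpl routine called
 * above): the most significant bit of each source byte is gathered into the
 * low bits of the destination, 8 bits for the MMX form and 16 for the SSE
 * form below.
 */
#if 0 /* illustrative sketch, not used by the emulation */
static uint64_t PmovmskbU64Example(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte; /* MSB of byte iByte */
    return fMask;
}
#endif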
9054
9055/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
9056FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
9057{
 9058 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
 9059 /** @todo testcase: Check that the instruction implicitly clears the high
 9060 * bits in 64-bit mode. The REX.W prefix only becomes necessary once
 9061 * VLMAX > 256 and opcode modifications are made to work with the whole
 9062 * width (not just 128). */
 9063 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
 9064 /* Docs say register only. */
9065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
9067 {
9068 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
9069 IEM_MC_BEGIN(2, 0);
9070 IEM_MC_ARG(uint64_t *, pDst, 0);
9071 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
9072 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9073 IEM_MC_PREPARE_SSE_USAGE();
9074 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9075 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9076 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
9077 IEM_MC_ADVANCE_RIP();
9078 IEM_MC_END();
9079 return VINF_SUCCESS;
9080 }
9081 return IEMOP_RAISE_INVALID_OPCODE();
9082}
9083
9084/* Opcode 0xf3 0x0f 0xd7 - invalid */
9085/* Opcode 0xf2 0x0f 0xd7 - invalid */
9086
9087
9088/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
9089FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
9090/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
9091FNIEMOP_STUB(iemOp_psubusb_Vx_W);
9092/* Opcode 0xf3 0x0f 0xd8 - invalid */
9093/* Opcode 0xf2 0x0f 0xd8 - invalid */
9094
9095/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
9096FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
9097/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
9098FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
9099/* Opcode 0xf3 0x0f 0xd9 - invalid */
9100/* Opcode 0xf2 0x0f 0xd9 - invalid */
9101
9102/** Opcode 0x0f 0xda - pminub Pq, Qq */
9103FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
9104/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
9105FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
9106/* Opcode 0xf3 0x0f 0xda - invalid */
9107/* Opcode 0xf2 0x0f 0xda - invalid */
9108
9109/** Opcode 0x0f 0xdb - pand Pq, Qq */
9110FNIEMOP_STUB(iemOp_pand_Pq_Qq);
9111/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
9112FNIEMOP_STUB(iemOp_pand_Vx_W);
9113/* Opcode 0xf3 0x0f 0xdb - invalid */
9114/* Opcode 0xf2 0x0f 0xdb - invalid */
9115
9116/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
9117FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
9118/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
9119FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
9120/* Opcode 0xf3 0x0f 0xdc - invalid */
9121/* Opcode 0xf2 0x0f 0xdc - invalid */
9122
9123/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
9124FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
9125/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
9126FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
9127/* Opcode 0xf3 0x0f 0xdd - invalid */
9128/* Opcode 0xf2 0x0f 0xdd - invalid */
9129
9130/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
9131FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
9132/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
9133FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
9134/* Opcode 0xf3 0x0f 0xde - invalid */
9135/* Opcode 0xf2 0x0f 0xde - invalid */
9136
9137/** Opcode 0x0f 0xdf - pandn Pq, Qq */
9138FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
9139/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
9140FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
9141/* Opcode 0xf3 0x0f 0xdf - invalid */
9142/* Opcode 0xf2 0x0f 0xdf - invalid */
9143
9144/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
9145FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
9146/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
9147FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
9148/* Opcode 0xf3 0x0f 0xe0 - invalid */
9149/* Opcode 0xf2 0x0f 0xe0 - invalid */
9150
9151/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
9152FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
9153/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
9154FNIEMOP_STUB(iemOp_psraw_Vx_W);
9155/* Opcode 0xf3 0x0f 0xe1 - invalid */
9156/* Opcode 0xf2 0x0f 0xe1 - invalid */
9157
9158/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
9159FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
9160/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
9161FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
9162/* Opcode 0xf3 0x0f 0xe2 - invalid */
9163/* Opcode 0xf2 0x0f 0xe2 - invalid */
9164
9165/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
9166FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
9167/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
9168FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
9169/* Opcode 0xf3 0x0f 0xe3 - invalid */
9170/* Opcode 0xf2 0x0f 0xe3 - invalid */
9171
9172/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
9173FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
9174/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
9175FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
9176/* Opcode 0xf3 0x0f 0xe4 - invalid */
9177/* Opcode 0xf2 0x0f 0xe4 - invalid */
9178
9179/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
9180FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
9181/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
9182FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
9183/* Opcode 0xf3 0x0f 0xe5 - invalid */
9184/* Opcode 0xf2 0x0f 0xe5 - invalid */
9185
9186/* Opcode 0x0f 0xe6 - invalid */
9187/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
9188FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
9189/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
9190FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
9191/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
9192FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
9193
9194
9195/**
9196 * @opcode 0xe7
9197 * @opcodesub !11 mr/reg
9198 * @oppfx none
9199 * @opcpuid sse
9200 * @opgroup og_sse1_cachect
9201 * @opxcpttype none
9202 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
9203 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
9204 */
9205FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
9206{
9207 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9209 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9210 {
9211 /* Register, memory. */
9212 IEM_MC_BEGIN(0, 2);
9213 IEM_MC_LOCAL(uint64_t, uSrc);
9214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9215
9216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9218 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
9219 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9220
9221 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9222 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9223 IEM_MC_FPU_TO_MMX_MODE();
9224
9225 IEM_MC_ADVANCE_RIP();
9226 IEM_MC_END();
9227 return VINF_SUCCESS;
9228 }
9229 /**
9230 * @opdone
9231 * @opmnemonic ud0fe7reg
9232 * @opcode 0xe7
9233 * @opcodesub 11 mr/reg
9234 * @oppfx none
9235 * @opunused immediate
9236 * @opcpuid sse
9237 * @optest ->
9238 */
9239 return IEMOP_RAISE_INVALID_OPCODE();
9240}
9241
9242/**
9243 * @opcode 0xe7
9244 * @opcodesub !11 mr/reg
9245 * @oppfx 0x66
9246 * @opcpuid sse2
9247 * @opgroup og_sse2_cachect
9248 * @opxcpttype 1
9249 * @optest op1=-1 op2=2 -> op1=2
9250 * @optest op1=0 op2=-42 -> op1=-42
9251 */
9252FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
9253{
9254 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9256 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
9257 {
9258 /* Register, memory. */
9259 IEM_MC_BEGIN(0, 2);
9260 IEM_MC_LOCAL(RTUINT128U, uSrc);
9261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9262
9263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9265 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
9266 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9267
9268 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
9269 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
9270
9271 IEM_MC_ADVANCE_RIP();
9272 IEM_MC_END();
9273 return VINF_SUCCESS;
9274 }
9275
9276 /**
9277 * @opdone
9278 * @opmnemonic ud660fe7reg
9279 * @opcode 0xe7
9280 * @opcodesub 11 mr/reg
9281 * @oppfx 0x66
9282 * @opunused immediate
9283 * @opcpuid sse
9284 * @optest ->
9285 */
9286 return IEMOP_RAISE_INVALID_OPCODE();
9287}
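
/* Note: IEM does not model caches, so the non-temporal hint of MOVNTQ and
   MOVNTDQ has no architectural effect here; what does matter is that the
   MOVNTDQ store above uses IEM_MC_STORE_MEM_U128_ALIGN_SSE and thus faults
   on a misaligned 16-byte operand, as the real instruction does. */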
9288
9289/* Opcode 0xf3 0x0f 0xe7 - invalid */
9290/* Opcode 0xf2 0x0f 0xe7 - invalid */
9291
9292
9293/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
9294FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
9295/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
9296FNIEMOP_STUB(iemOp_psubsb_Vx_W);
9297/* Opcode 0xf3 0x0f 0xe8 - invalid */
9298/* Opcode 0xf2 0x0f 0xe8 - invalid */
9299
9300/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
9301FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
9302/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
9303FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
9304/* Opcode 0xf3 0x0f 0xe9 - invalid */
9305/* Opcode 0xf2 0x0f 0xe9 - invalid */
9306
9307/** Opcode 0x0f 0xea - pminsw Pq, Qq */
9308FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
9309/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
9310FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
9311/* Opcode 0xf3 0x0f 0xea - invalid */
9312/* Opcode 0xf2 0x0f 0xea - invalid */
9313
9314/** Opcode 0x0f 0xeb - por Pq, Qq */
9315FNIEMOP_STUB(iemOp_por_Pq_Qq);
9316/** Opcode 0x66 0x0f 0xeb - por Vx, W */
9317FNIEMOP_STUB(iemOp_por_Vx_W);
9318/* Opcode 0xf3 0x0f 0xeb - invalid */
9319/* Opcode 0xf2 0x0f 0xeb - invalid */
9320
9321/** Opcode 0x0f 0xec - paddsb Pq, Qq */
9322FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
9323/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
9324FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
9325/* Opcode 0xf3 0x0f 0xec - invalid */
9326/* Opcode 0xf2 0x0f 0xec - invalid */
9327
9328/** Opcode 0x0f 0xed - paddsw Pq, Qq */
9329FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
9330/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
9331FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
9332/* Opcode 0xf3 0x0f 0xed - invalid */
9333/* Opcode 0xf2 0x0f 0xed - invalid */
9334
9335/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
9336FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
9337/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
9338FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
9339/* Opcode 0xf3 0x0f 0xee - invalid */
9340/* Opcode 0xf2 0x0f 0xee - invalid */
9341
9342
9343/** Opcode 0x0f 0xef - pxor Pq, Qq */
9344FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
9345{
9346 IEMOP_MNEMONIC(pxor, "pxor");
9347 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
9348}
9349
9350/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
9351FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
9352{
9353 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
9354 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
9355}
9356
9357/* Opcode 0xf3 0x0f 0xef - invalid */
9358/* Opcode 0xf2 0x0f 0xef - invalid */
9359
9360/* Opcode 0x0f 0xf0 - invalid */
9361/* Opcode 0x66 0x0f 0xf0 - invalid */
9362/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
9363FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
9364
9365/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
9366FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
9367/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
9368FNIEMOP_STUB(iemOp_psllw_Vx_W);
9369/* Opcode 0xf2 0x0f 0xf1 - invalid */
9370
9371/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
9372FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
9373/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
9374FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
9375/* Opcode 0xf2 0x0f 0xf2 - invalid */
9376
9377/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
9378FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
9379/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
9380FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
9381/* Opcode 0xf2 0x0f 0xf3 - invalid */
9382
9383/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
9384FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
9385/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
9386FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
9387/* Opcode 0xf2 0x0f 0xf4 - invalid */
9388
9389/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
9390FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
9391/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
9392FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
9393/* Opcode 0xf2 0x0f 0xf5 - invalid */
9394
9395/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
9396FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
9397/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
9398FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
9399/* Opcode 0xf2 0x0f 0xf6 - invalid */
9400
9401/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
9402FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
9403/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
9404FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
9405/* Opcode 0xf2 0x0f 0xf7 - invalid */
9406
9407/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
9408FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9409/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
9410FNIEMOP_STUB(iemOp_psubb_Vx_W);
9411/* Opcode 0xf2 0x0f 0xf8 - invalid */
9412
9413/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9414FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9415/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9416FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9417/* Opcode 0xf2 0x0f 0xf9 - invalid */
9418
9419/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9420FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9421/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9422FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9423/* Opcode 0xf2 0x0f 0xfa - invalid */
9424
9425/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9426FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9427/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9428FNIEMOP_STUB(iemOp_psubq_Vx_W);
9429/* Opcode 0xf2 0x0f 0xfb - invalid */
9430
9431/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9432FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9433/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9434FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9435/* Opcode 0xf2 0x0f 0xfc - invalid */
9436
9437/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9438FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9439/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9440FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9441/* Opcode 0xf2 0x0f 0xfd - invalid */
9442
9443/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9444FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9445/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9446FNIEMOP_STUB(iemOp_paddd_Vx_W);
9447/* Opcode 0xf2 0x0f 0xfe - invalid */
9448
9449
9450/** Opcode 0x0f 0xff - UD0 */
9451FNIEMOP_DEF(iemOp_ud0)
9452{
9453 IEMOP_MNEMONIC(ud0, "ud0");
9454 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9455 {
9456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9457#ifndef TST_IEM_CHECK_MC
9458 RTGCPTR GCPtrEff;
9459 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9460 if (rcStrict != VINF_SUCCESS)
9461 return rcStrict;
9462#endif
9463 IEMOP_HLP_DONE_DECODING();
9464 }
9465 return IEMOP_RAISE_INVALID_OPCODE();
9466}
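
/* Note: the vendor check above models the fact that Intel CPUs decode a
   ModR/M byte (and any effective address bytes) for UD0 before raising #UD,
   while other CPUs raise #UD on the opcode alone; the bRm fetch and the
   iemOpHlpCalcRmEffAddr call exist only to consume those bytes. */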
9467
9468
9469
9470/**
9471 * Two byte opcode map, first byte 0x0f.
9472 *
9473 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9474 * check if it needs updating as well when making changes.
9475 */
9476IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9477{
 9478 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9479 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9480 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9481 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9482 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9483 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9484 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9485 /* 0x06 */ IEMOP_X4(iemOp_clts),
9486 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9487 /* 0x08 */ IEMOP_X4(iemOp_invd),
9488 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9489 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9490 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9491 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9492 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9493 /* 0x0e */ IEMOP_X4(iemOp_femms),
9494 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9495
9496 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9497 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9498 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9499 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9500 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9501 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9502 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9503 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9504 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9505 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9506 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9507 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9508 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9509 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9510 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9511 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9512
9513 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9514 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9515 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9516 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9517 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9518 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9519 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9520 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9521 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9522 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9523 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9524 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9525 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9526 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9527 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9528 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9529
9530 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9531 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9532 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9533 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9534 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9535 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9536 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9537 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9538 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9539 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9540 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9541 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9542 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9543 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9544 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9545 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9546
9547 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9548 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9549 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9550 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9551 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9552 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9553 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9554 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9555 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9556 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9557 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9558 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9559 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9560 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9561 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9562 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9563
9564 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9565 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9566 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9567 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9568 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9569 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9570 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9571 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9572 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9573 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9574 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9575 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9576 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9577 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9578 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9579 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9580
9581 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9582 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9583 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9584 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9585 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9586 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9587 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9588 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9589 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9590 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9591 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9592 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9593 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9594 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9595 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9596 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
9597
9598 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9599 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9600 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9601 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9602 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9603 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9604 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9605 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9606
9607 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9608 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9609 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9610 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9611 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9612 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9613 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9614 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9615
9616 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9617 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9618 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9619 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9620 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9621 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9622 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9623 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9624 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9625 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9626 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9627 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9628 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9629 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9630 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9631 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9632
9633 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9634 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9635 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9636 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9637 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9638 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9639 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9640 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9641 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9642 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9643 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9644 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9645 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9646 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9647 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9648 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9649
9650 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9651 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9652 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9653 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9654 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9655 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9656 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9657 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9658 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9659 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9660 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9661 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9662 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9663 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9664 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9665 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9666
9667 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9668 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9669 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9670 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9671 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9672 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9673 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9674 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9675 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9676 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9677 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9678 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9679 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9680 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9681 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9682 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9683
9684 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9685 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9686 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9687 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9688 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9689 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9690 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9691 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9692 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9693 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9694 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9695 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9696 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9697 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9698 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9699 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9700
9701 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9702 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9703 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9704 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9705 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9706 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9707 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9708 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9709 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9710 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9711 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9712 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9713 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9714 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9715 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9716 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9717
9718 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9719 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9720 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9721 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9722 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9723 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9724 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9725 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9726 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9727 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9728 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9729 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9730 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9731 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9732 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9733 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9734
9735 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9736 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9737 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9738 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9739 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9740 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9741 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9742 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9743 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9744 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9745 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9746 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9747 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9748 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9749 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9750 /* 0xff */ IEMOP_X4(iemOp_ud0),
9751};
9752AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
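
/*
 * A minimal sketch of how a 1024-entry map like this is meant to be indexed
 * (illustrative only): 256 opcode rows times 4 mandatory-prefix columns,
 * with pVCpu->iem.s.idxPrefix supplying the column (0=none, 1=066h, 2=0f3h,
 * 3=0f2h).
 */
#if 0 /* illustrative sketch, not used by the decoder */
static PFNIEMOP iemTwoByteLookupExample(uint8_t bOpcode, uint8_t idxPrefix)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif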
9753
9754/** @} */
9755