VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@66463

Last change on this file since 66463 was 66463, checked in by vboxsync, 8 years ago

IEM,bs3-cpu-generated-1: Made the current testcases pass on AMD.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 332.8 KB
 
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66463 2017-04-06 17:58:25Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */


/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x00 /4 and /5 (common worker for verr and verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
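
/*
 * A quick reference for the ModRM decoding used by the dispatcher above
 * (a sketch based on the standard x86 ModRM layout that the X86_MODRM_*
 * masks and shifts express; the helper name is hypothetical):
 */
#if 0
static void iemExampleSplitModRM(uint8_t bRm)
{
    uint8_t const iMod = bRm >> 6;          /* 3 = register operand, 0..2 = memory operand. */
    uint8_t const iReg = (bRm >> 3) & 7;    /* For group opcodes this is the /0../7 extension. */
    uint8_t const iRm  = bRm & 7;           /* Register number or memory addressing mode. */
    /* E.g. bRm = 0xd3 (11 010 011b): mod=3, reg=2 -> iemOp_Grp6_lldt, rm=3. */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif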


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x01 0xc1 (/0, register form). */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x01 0xc2 (/0, register form). */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x01 0xc3 (/0, register form). */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x01 0xc4 (/0, register form). */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x01 0xc8 (/1, register form). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}

/** Opcode 0x0f 0x01 0xc9 (/1, register form). */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
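
/*
 * Architecturally (a sketch from the instruction definitions, not from the
 * iemCImpl_* code): xgetbv returns XCR[ECX] in EDX:EAX and xsetbv stores
 * EDX:EAX there, with #GP(0) on an unsupported XCR index and, for xsetbv,
 * when CPL != 0. Names below are hypothetical.
 */
#if 0
static uint64_t iemExampleXGetBv(uint64_t const *pauXcr, uint32_t uXcrIndex)
{
    return pauXcr[uXcrIndex];   /* Real code must raise #GP(0) for invalid indexes. */
}
#endif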


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
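
/*
 * Worked example of the SMSW upper-bit handling above (a sketch, the helper
 * name is hypothetical): the 286 has no CR0 bits above TS, so bits 4..15
 * read as ones; the 386 additionally implements ET (bit 4), so bits 5..15
 * read as ones; later CPUs return the low word of CR0 unmodified.
 */
#if 0
static uint16_t iemExampleSmswUpperBits(uint16_t u16Cr0, uint32_t uTargetCpu)
{
    if (uTargetCpu > IEMTARGETCPU_386)
        return u16Cr0;              /* 486+: as-is. */
    if (uTargetCpu == IEMTARGETCPU_386)
        return u16Cr0 | 0xffe0;     /* 386: ET (bit 4) is real. */
    return u16Cr0 | 0xfff0;         /* 286. */
}
#endif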


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
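
/*
 * Reference sketch of the value handed to iemCImpl_lmsw (derived from the
 * architectural definition, not copied from the C implementation): only
 * PE, MP, EM and TS are loaded, and LMSW can set PE but never clear it.
 */
#if 0
static uint32_t iemExampleLmsw(uint32_t uOldCr0, uint16_t u16NewMsw)
{
    uint32_t uNewCr0 = (uOldCr0 & ~(uint32_t)0xe) | (u16NewMsw & 0xf);
    uNewCr0 |= uOldCr0 & 1;     /* PE is sticky. */
    return uNewCr0;
}
#endif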


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x01 0xf8 (/7, register form). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}

/** Opcode 0x0f 0x01 0xf9 (/7, register form). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
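
/*
 * For the register forms dispatched above, the ModRM byte itself is how
 * these instructions are usually listed: with mod=3 the byte works out to
 * 0xc0 | (reg << 3) | rm. A quick sketch (hypothetical helper):
 */
#if 0
static uint8_t iemExampleGrp7RegFormByte(uint8_t iReg, uint8_t iRm)
{
    return (uint8_t)(0xc0 | (iReg << 3) | iRm); /* /0 rm=1 -> 0xc1 (vmcall), /1 rm=0 -> 0xc8 (monitor). */
}
#endif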

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
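
/*
 * A full 256-entry table for the 24 defined 3DNow! suffix bytes would be
 * mostly invalid entries, hence the switch above. If a table were preferred,
 * a sparse map is the other natural shape (a sketch; the entry type is
 * hypothetical, the worker names are those declared above):
 */
#if 0
typedef struct IEMEXAMPLE3DNOWENTRY { uint8_t bOpcode; PFNIEMOP pfnOp; } IEMEXAMPLE3DNOWENTRY;
static const IEMEXAMPLE3DNOWENTRY g_aExample3DNow[] =
{
    { 0x0c, iemOp_3Dnow_pi2fw_Pq_Qq },
    { 0x9e, iemOp_3Dnow_pfadd_PQ_Qq },
    /* ... the remaining 22 defined encodings ... */
};
#endif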


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss); //NEXT!!
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
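
/*
 * Qword-lane sketch for the two forms above (plain C model, all names
 * hypothetical): movhlps copies the source's high qword into the low qword
 * of the destination; movlps loads the low qword from memory; in both cases
 * the destination's high qword is preserved.
 */
#if 0
typedef struct IEMEXAMPLEXMM { uint64_t au64[2]; } IEMEXAMPLEXMM;
static void iemExampleMovHlps(IEMEXAMPLEXMM *pDst, IEMEXAMPLEXMM const *pSrc)
{
    pDst->au64[0] = pSrc->au64[1];  /* High -> low; pDst->au64[1] is untouched. */
}
static void iemExampleMovLps(IEMEXAMPLEXMM *pDst, uint64_t const *pu64Mem)
{
    pDst->au64[0] = *pu64Mem;       /* Low qword loaded; high qword preserved. */
}
#endif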


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
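
/*
 * Reference model for what iemAImpl_movsldup computes (a sketch matching
 * the SSE3 definition and the @optest values above, not the assembly
 * helper itself): each even dword is duplicated into the odd slot above it.
 */
#if 0
static void iemExampleMovSlDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
{
    au32Dst[0] = au32Src[0];
    au32Dst[1] = au32Src[0];
    au32Dst[2] = au32Src[2];
    au32Dst[3] = au32Src[2];
}
#endif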


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
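
/*
 * Reference model for iemAImpl_movddup (same caveat as the movsldup sketch
 * above): the low qword of the source is duplicated into both halves.
 */
#if 0
static void iemExampleMovDDup(uint64_t au64Dst[2], uint64_t u64Src)
{
    au64Dst[0] = u64Src;
    au64Dst[1] = u64Src;
}
#endif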


/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
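
/*
 * Sketch of the CR8-via-LOCK encoding handled above (hypothetical helper):
 * on CPUs with the fMovCr8In32Bit feature, a LOCK prefix on mov to/from a
 * control register selects the high register bank, i.e. CR8, which amounts
 * to setting bit 3 of the control register index.
 */
#if 0
static uint8_t iemExampleCrIndex(uint8_t bRm, bool fRexR, bool fLock)
{
    uint8_t iCrReg = (uint8_t)(((bRm >> 3) & 7) | (fRexR ? 8 : 0));
    if (fLock)
        iCrReg |= 8;    /* LOCK selects CR8. */
    return iCrReg;
}
#endif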


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1967 {
1968 /*
1969 * memory, register.
1970 */
1971 IEM_MC_BEGIN(0, 2);
1972 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1974
1975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1977 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1978 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1979
1980 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1981 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1982
1983 IEM_MC_ADVANCE_RIP();
1984 IEM_MC_END();
1985 }
1986 /* The register, register encoding is invalid. */
1987 else
1988 return IEMOP_RAISE_INVALID_OPCODE();
1989 return VINF_SUCCESS;
1990}
1991
1992/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
1993FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1994{
1995 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1997 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1998 {
1999 /*
2000 * memory, register.
2001 */
2002 IEM_MC_BEGIN(0, 2);
2003 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2005
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2008 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2009 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2010
2011 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2012 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2013
2014 IEM_MC_ADVANCE_RIP();
2015 IEM_MC_END();
2016 }
2017 /* The register, register encoding is invalid. */
2018 else
2019 return IEMOP_RAISE_INVALID_OPCODE();
2020 return VINF_SUCCESS;
2021}
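
/*
 * Editorial note: both movnt* forms above are memory-destination only, which
 * is why the mod=11b register encoding raises #UD.  The non-temporal hint
 * does not change the architectural result, so emulating it as an ordinary
 * aligned store (with the #GP alignment check) is sufficient here.
 */
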
2022/* Opcode 0xf3 0x0f 0x2b - invalid */
2023/* Opcode 0xf2 0x0f 0x2b - invalid */
2024
2025
2026/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2027FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2028/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2029FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2030/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2031FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2032/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2033FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2034
2035/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2036FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2037/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2038FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2039/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2040FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2041/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2042FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2043
2044/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2045FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2046/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2047FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2048/* Opcode 0xf3 0x0f 0x2e - invalid */
2049/* Opcode 0xf2 0x0f 0x2e - invalid */
2050
2051/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2052FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2053/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2054FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2055/* Opcode 0xf3 0x0f 0x2f - invalid */
2056/* Opcode 0xf2 0x0f 0x2f - invalid */
2057
2058/** Opcode 0x0f 0x30. */
2059FNIEMOP_DEF(iemOp_wrmsr)
2060{
2061 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2063 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2064}
2065
2066
2067/** Opcode 0x0f 0x31. */
2068FNIEMOP_DEF(iemOp_rdtsc)
2069{
2070 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2072 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2073}
2074
2075
2076/** Opcode 0x0f 0x32. */
2077FNIEMOP_DEF(iemOp_rdmsr)
2078{
2079 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2082}
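
/*
 * Editorial usage sketch (illustrative, not from the original source) for the
 * MSR instructions above, which defer to C helpers for the CPL check and the
 * per-MSR dispatch:
 *
 *      mov     ecx, 0x10          ; IA32_TIME_STAMP_COUNTER
 *      rdmsr                      ; edx:eax = MSR[ecx]; #GP(0) if CPL != 0
 *      wrmsr                      ; MSR[ecx] = edx:eax, same privilege rule
 */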
2083
2084
2085/** Opcode 0x0f 0x33. */
2086FNIEMOP_STUB(iemOp_rdpmc);
2087/** Opcode 0x0f 0x34. */
2088FNIEMOP_STUB(iemOp_sysenter);
2089/** Opcode 0x0f 0x35. */
2090FNIEMOP_STUB(iemOp_sysexit);
2091/** Opcode 0x0f 0x37. */
2092FNIEMOP_STUB(iemOp_getsec);
2093/** Opcode 0x0f 0x38. */
2094FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2095/** Opcode 0x0f 0x3a. */
2096FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2097
2098
2099/**
2100 * Implements a conditional move.
2101 *
2102 * Wish there were an obvious way to do this that allowed sharing code and
2103 * reducing bloat.
2104 *
2105 * @param a_Cnd The conditional "microcode" operation.
2106 */
2107#define CMOV_X(a_Cnd) \
2108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2109 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2110 { \
2111 switch (pVCpu->iem.s.enmEffOpSize) \
2112 { \
2113 case IEMMODE_16BIT: \
2114 IEM_MC_BEGIN(0, 1); \
2115 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2116 a_Cnd { \
2117 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2118 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2119 } IEM_MC_ENDIF(); \
2120 IEM_MC_ADVANCE_RIP(); \
2121 IEM_MC_END(); \
2122 return VINF_SUCCESS; \
2123 \
2124 case IEMMODE_32BIT: \
2125 IEM_MC_BEGIN(0, 1); \
2126 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2127 a_Cnd { \
2128 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2129 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2130 } IEM_MC_ELSE() { \
2131 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2132 } IEM_MC_ENDIF(); \
2133 IEM_MC_ADVANCE_RIP(); \
2134 IEM_MC_END(); \
2135 return VINF_SUCCESS; \
2136 \
2137 case IEMMODE_64BIT: \
2138 IEM_MC_BEGIN(0, 1); \
2139 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2140 a_Cnd { \
2141 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2142 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2143 } IEM_MC_ENDIF(); \
2144 IEM_MC_ADVANCE_RIP(); \
2145 IEM_MC_END(); \
2146 return VINF_SUCCESS; \
2147 \
2148 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2149 } \
2150 } \
2151 else \
2152 { \
2153 switch (pVCpu->iem.s.enmEffOpSize) \
2154 { \
2155 case IEMMODE_16BIT: \
2156 IEM_MC_BEGIN(0, 2); \
2157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2158 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2160 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2161 a_Cnd { \
2162 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2163 } IEM_MC_ENDIF(); \
2164 IEM_MC_ADVANCE_RIP(); \
2165 IEM_MC_END(); \
2166 return VINF_SUCCESS; \
2167 \
2168 case IEMMODE_32BIT: \
2169 IEM_MC_BEGIN(0, 2); \
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2171 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2173 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2174 a_Cnd { \
2175 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2176 } IEM_MC_ELSE() { \
2177 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2178 } IEM_MC_ENDIF(); \
2179 IEM_MC_ADVANCE_RIP(); \
2180 IEM_MC_END(); \
2181 return VINF_SUCCESS; \
2182 \
2183 case IEMMODE_64BIT: \
2184 IEM_MC_BEGIN(0, 2); \
2185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2186 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2188 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2189 a_Cnd { \
2190 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2191 } IEM_MC_ENDIF(); \
2192 IEM_MC_ADVANCE_RIP(); \
2193 IEM_MC_END(); \
2194 return VINF_SUCCESS; \
2195 \
2196 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2197 } \
2198 } do {} while (0)
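
/*
 * Editorial note (illustrative, not from the original source): the 32-bit
 * cases above carry an IEM_MC_ELSE() arm because CMOVcc with a 32-bit operand
 * in 64-bit mode zero-extends the destination even when the condition is
 * false.  Worked example with ZF clear:
 *
 *      mov     rax, 0xffffffff12345678
 *      cmove   eax, ebx           ; ebx is not copied, yet rax becomes
 *                                 ; 0x0000000012345678 (bits 63:32 cleared)
 */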
2199
2200
2201
2202/** Opcode 0x0f 0x40. */
2203FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2204{
2205 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2206 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2207}
2208
2209
2210/** Opcode 0x0f 0x41. */
2211FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2212{
2213 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2214 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2215}
2216
2217
2218/** Opcode 0x0f 0x42. */
2219FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2220{
2221 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2222 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2223}
2224
2225
2226/** Opcode 0x0f 0x43. */
2227FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2228{
2229 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2230 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2231}
2232
2233
2234/** Opcode 0x0f 0x44. */
2235FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2236{
2237 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2238 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2239}
2240
2241
2242/** Opcode 0x0f 0x45. */
2243FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2244{
2245 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2246 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2247}
2248
2249
2250/** Opcode 0x0f 0x46. */
2251FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2252{
2253 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2254 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2255}
2256
2257
2258/** Opcode 0x0f 0x47. */
2259FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2260{
2261 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2262 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2263}
2264
2265
2266/** Opcode 0x0f 0x48. */
2267FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2268{
2269 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2270 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2271}
2272
2273
2274/** Opcode 0x0f 0x49. */
2275FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2276{
2277 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2278 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2279}
2280
2281
2282/** Opcode 0x0f 0x4a. */
2283FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2284{
2285 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2286 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2287}
2288
2289
2290/** Opcode 0x0f 0x4b. */
2291FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2292{
2293 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2294 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2295}
2296
2297
2298/** Opcode 0x0f 0x4c. */
2299FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2300{
2301 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2302 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2303}
2304
2305
2306/** Opcode 0x0f 0x4d. */
2307FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2308{
2309 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2310 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2311}
2312
2313
2314/** Opcode 0x0f 0x4e. */
2315FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2316{
2317 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2318 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2319}
2320
2321
2322/** Opcode 0x0f 0x4f. */
2323FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2324{
2325 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2326 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2327}
2328
2329#undef CMOV_X
2330
2331/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2332FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2333/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2334FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2335/* Opcode 0xf3 0x0f 0x50 - invalid */
2336/* Opcode 0xf2 0x0f 0x50 - invalid */
2337
2338/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2339FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2340/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2341FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2342/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2343FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2344/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2345FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2346
2347/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2348FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2349/* Opcode 0x66 0x0f 0x52 - invalid */
2350/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2351FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2352/* Opcode 0xf2 0x0f 0x52 - invalid */
2353
2354/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2355FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2356/* Opcode 0x66 0x0f 0x53 - invalid */
2357/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2358FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2359/* Opcode 0xf2 0x0f 0x53 - invalid */
2360
2361/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2362FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2363/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2364FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2365/* Opcode 0xf3 0x0f 0x54 - invalid */
2366/* Opcode 0xf2 0x0f 0x54 - invalid */
2367
2368/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2369FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2370/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2371FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2372/* Opcode 0xf3 0x0f 0x55 - invalid */
2373/* Opcode 0xf2 0x0f 0x55 - invalid */
2374
2375/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2376FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2377/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2378FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2379/* Opcode 0xf3 0x0f 0x56 - invalid */
2380/* Opcode 0xf2 0x0f 0x56 - invalid */
2381
2382/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2383FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2384/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2385FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2386/* Opcode 0xf3 0x0f 0x57 - invalid */
2387/* Opcode 0xf2 0x0f 0x57 - invalid */
2388
2389/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2390FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2391/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2392FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2393/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2394FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2395/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2396FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2397
2398/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2399FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2400/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2401FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2402/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2403FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2404/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2405FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2406
2407/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2408FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2409/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2410FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2411/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2412FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2413/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2414FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2415
2416/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2417FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2418/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2419FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2420/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2421FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2422/* Opcode 0xf2 0x0f 0x5b - invalid */
2423
2424/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2425FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2426/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2427FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2428/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2429FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2430/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2431FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2432
2433/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2434FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2435/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2436FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2437/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2438FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2439/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2440FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2441
2442/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2443FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2444/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2445FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2446/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2447FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2448/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2449FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2450
2451/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2452FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2453/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2454FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2455/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2456FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2457/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2458FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2459
2460/**
2461 * Common worker for SSE2 instructions on the forms:
2462 * pxxxx xmm1, xmm2/mem128
2463 *
2464 * The 2nd operand is the first half of a register, which in the memory case
2465 * means a 64-bit fetch from a 128-bit aligned operand; only the low quadword
2466 * is used.
2467 *
2468 * Exceptions type 4.
2469 */
2470FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2471{
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2474 {
2475 /*
2476 * Register, register.
2477 */
2478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2479 IEM_MC_BEGIN(2, 0);
2480 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2481 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2482 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2483 IEM_MC_PREPARE_SSE_USAGE();
2484 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2486 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 else
2491 {
2492 /*
2493 * Register, memory.
2494 */
2495 IEM_MC_BEGIN(2, 2);
2496 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2497 IEM_MC_LOCAL(uint64_t, uSrc);
2498 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2500
2501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2503 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2504 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2505
2506 IEM_MC_PREPARE_SSE_USAGE();
2507 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2508 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2509
2510 IEM_MC_ADVANCE_RIP();
2511 IEM_MC_END();
2512 }
2513 return VINF_SUCCESS;
2514}
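
/*
 * Worked example (editorial): what these low-to-full unpack workers compute.
 * With mm1 = 0x0706050403020100 and mm2 = 0x0F0E0D0C0B0A0908, the MMX form
 * punpcklbw mm1, mm2 interleaves the two low dwords byte by byte:
 *
 *      mm1 = 0x0B030A0209010800   ; mm2 byte above each mm1 byte
 */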
2515
2516
2517/**
2518 * Common worker for MMX instructions on the forms:
2519 * pxxxx mm1, mm2/mem32
2520 *
2521 * The 2nd operand is the first half of a register, which in the memory case
2522 * means a 32-bit memory access.
2523 *
2524 * Exceptions type 4.
2525 */
2526FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2528{
2529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2530 if (!pImpl->pfnU64)
2531 return IEMOP_RAISE_INVALID_OPCODE();
2532 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2533 {
2534 /*
2535 * Register, register.
2536 */
2537 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2538 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2540 IEM_MC_BEGIN(2, 0);
2541 IEM_MC_ARG(uint64_t *, pDst, 0);
2542 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2543 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2544 IEM_MC_PREPARE_FPU_USAGE();
2545 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2546 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2547 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2548 IEM_MC_ADVANCE_RIP();
2549 IEM_MC_END();
2550 }
2551 else
2552 {
2553 /*
2554 * Register, memory.
2555 */
2556 IEM_MC_BEGIN(2, 2);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_LOCAL(uint32_t, uSrc);
2559 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561
2562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2564 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2565 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2566
2567 IEM_MC_PREPARE_FPU_USAGE();
2568 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2569 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2570
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 return VINF_SUCCESS;
2575}
2576
2577
2578/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2579FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2580{
2581 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2582 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2583}
2584
2585/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2586FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2587{
2588 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2589 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2590}
2591
2592/* Opcode 0xf3 0x0f 0x60 - invalid */
2593
2594
2595/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2596FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2597{
2598 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires MMX in CPUID. */
2599 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2600}
2601
2602/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2603FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2604{
2605 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2606 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2607}
2608
2609/* Opcode 0xf3 0x0f 0x61 - invalid */
2610
2611
2612/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2613FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2614{
2615 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2616 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2617}
2618
2619/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2620FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2621{
2622 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2623 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2624}
2625
2626/* Opcode 0xf3 0x0f 0x62 - invalid */
2627
2628
2629
2630/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2631FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2632/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2633FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2634/* Opcode 0xf3 0x0f 0x63 - invalid */
2635
2636/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2637FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2638/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2639FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2640/* Opcode 0xf3 0x0f 0x64 - invalid */
2641
2642/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2643FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2644/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2645FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2646/* Opcode 0xf3 0x0f 0x65 - invalid */
2647
2648/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2649FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2650/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2651FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2652/* Opcode 0xf3 0x0f 0x66 - invalid */
2653
2654/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2655FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2656/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2657FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2658/* Opcode 0xf3 0x0f 0x67 - invalid */
2659
2660
2661/**
2662 * Common worker for MMX instructions on the form:
2663 * pxxxx mm1, mm2/mem64
2664 *
2665 * The 2nd operand is the second half of a register, which in the memory case
2666 * means a 64-bit memory access.
2668 *
2669 * Exceptions type 4.
2670 */
2671FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2672{
2673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2674 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2675 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2676 {
2677 /*
2678 * Register, register.
2679 */
2680 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2681 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2683 IEM_MC_BEGIN(2, 0);
2684 IEM_MC_ARG(uint64_t *, pDst, 0);
2685 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2686 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2687 IEM_MC_PREPARE_FPU_USAGE();
2688 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2689 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2690 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2691 IEM_MC_ADVANCE_RIP();
2692 IEM_MC_END();
2693 }
2694 else
2695 {
2696 /*
2697 * Register, memory.
2698 */
2699 IEM_MC_BEGIN(2, 2);
2700 IEM_MC_ARG(uint64_t *, pDst, 0);
2701 IEM_MC_LOCAL(uint64_t, uSrc);
2702 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2704
2705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2707 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2708 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2709
2710 IEM_MC_PREPARE_FPU_USAGE();
2711 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2712 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2713
2714 IEM_MC_ADVANCE_RIP();
2715 IEM_MC_END();
2716 }
2717 return VINF_SUCCESS;
2718}
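
/*
 * Worked example (editorial): the high-to-full unpack uses the upper halves
 * instead.  With mm1 = 0x0706050403020100 and mm2 = 0x0F0E0D0C0B0A0908,
 * punpckhbw mm1, mm2 gives:
 *
 *      mm1 = 0x0F070E060D050C04
 */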
2719
2720
2721/**
2722 * Common worker for SSE2 instructions on the form:
2723 * pxxxx xmm1, xmm2/mem128
2724 *
2725 * The 2nd operand is the second half of a register, which in the memory case
2726 * means a 128-bit aligned access; the full 128 bits may be fetched, but only
2727 * the upper 64 bits are used.
2728 *
2729 * Exceptions type 4.
2730 */
2731FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2732{
2733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2734 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2735 {
2736 /*
2737 * Register, register.
2738 */
2739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2740 IEM_MC_BEGIN(2, 0);
2741 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2742 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2743 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2744 IEM_MC_PREPARE_SSE_USAGE();
2745 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2746 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2747 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2748 IEM_MC_ADVANCE_RIP();
2749 IEM_MC_END();
2750 }
2751 else
2752 {
2753 /*
2754 * Register, memory.
2755 */
2756 IEM_MC_BEGIN(2, 2);
2757 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2758 IEM_MC_LOCAL(RTUINT128U, uSrc);
2759 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2761
2762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2764 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2765 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2766
2767 IEM_MC_PREPARE_SSE_USAGE();
2768 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2769 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2770
2771 IEM_MC_ADVANCE_RIP();
2772 IEM_MC_END();
2773 }
2774 return VINF_SUCCESS;
2775}
2776
2777
2778/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2779FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2780{
2781 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2782 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2783}
2784
2785/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2786FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2787{
2788 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2789 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2790}
2791/* Opcode 0xf3 0x0f 0x68 - invalid */
2792
2793
2794/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2795FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2796{
2797 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2798 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2799}
2800
2801/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2802FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2803{
2804 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2805 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2807}
2808/* Opcode 0xf3 0x0f 0x69 - invalid */
2809
2810
2811/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2812FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2813{
2814 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2815 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2816}
2817
2818/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2819FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2820{
2821 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2822 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2823}
2824/* Opcode 0xf3 0x0f 0x6a - invalid */
2825
2826
2827/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2828FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2829/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2830FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2831/* Opcode 0xf3 0x0f 0x6b - invalid */
2832
2833
2834/* Opcode 0x0f 0x6c - invalid */
2835
2836/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2837FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2838{
2839 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2840 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2841}
2842
2843/* Opcode 0xf3 0x0f 0x6c - invalid */
2844/* Opcode 0xf2 0x0f 0x6c - invalid */
2845
2846
2847/* Opcode 0x0f 0x6d - invalid */
2848
2849/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
2850FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2851{
2852 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2853 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2854}
2855
2856/* Opcode 0xf3 0x0f 0x6d - invalid */
2857
2858
2859/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2860FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2861{
2862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2863 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2864 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2865 else
2866 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2867 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2868 {
2869 /* MMX, greg */
2870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2871 IEM_MC_BEGIN(0, 1);
2872 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2873 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2874 IEM_MC_LOCAL(uint64_t, u64Tmp);
2875 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2876 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2877 else
2878 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2879 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2880 IEM_MC_ADVANCE_RIP();
2881 IEM_MC_END();
2882 }
2883 else
2884 {
2885 /* MMX, [mem] */
2886 IEM_MC_BEGIN(0, 2);
2887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2888 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* No immediate follows the ModR/M byte. */
2890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2891 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2892 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2893 {
2894 IEM_MC_LOCAL(uint64_t, u64Tmp);
2895 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2896 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2897 }
2898 else
2899 {
2900 IEM_MC_LOCAL(uint32_t, u32Tmp);
2901 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2902 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2903 }
2904 IEM_MC_ADVANCE_RIP();
2905 IEM_MC_END();
2906 }
2907 return VINF_SUCCESS;
2908}
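
/*
 * Editorial encoding sketch for the REX.W selection above (NASM syntax):
 *
 *      movd    mm0, eax           ; 0F 6E C0     - 32 bits, zero-extended
 *      movq    mm0, rax           ; 48 0F 6E C0  - REX.W set, all 64 bits
 */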
2909
2910/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
2911FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2912{
2913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2914 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2915 IEMOP_MNEMONIC(vmovdq_Vq_Eq, "vmovq Vq,Eq");
2916 else
2917 IEMOP_MNEMONIC(vmovdq_Vd_Ed, "vmovd Vd,Ed");
2918 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2919 {
2920 /* XMM, greg*/
2921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2922 IEM_MC_BEGIN(0, 1);
2923 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2924 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2925 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2926 {
2927 IEM_MC_LOCAL(uint64_t, u64Tmp);
2928 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2929 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2930 }
2931 else
2932 {
2933 IEM_MC_LOCAL(uint32_t, u32Tmp);
2934 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2935 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2936 }
2937 IEM_MC_ADVANCE_RIP();
2938 IEM_MC_END();
2939 }
2940 else
2941 {
2942 /* XMM, [mem] */
2943 IEM_MC_BEGIN(0, 2);
2944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2948 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2949 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2950 {
2951 IEM_MC_LOCAL(uint64_t, u64Tmp);
2952 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2953 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2954 }
2955 else
2956 {
2957 IEM_MC_LOCAL(uint32_t, u32Tmp);
2958 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2959 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2960 }
2961 IEM_MC_ADVANCE_RIP();
2962 IEM_MC_END();
2963 }
2964 return VINF_SUCCESS;
2965}
2966
2967/* Opcode 0xf3 0x0f 0x6e - invalid */
2968
2969
2970/** Opcode 0x0f 0x6f - movq Pq, Qq */
2971FNIEMOP_DEF(iemOp_movq_Pq_Qq)
2972{
2973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2974 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2976 {
2977 /*
2978 * Register, register.
2979 */
2980 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2981 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2983 IEM_MC_BEGIN(0, 1);
2984 IEM_MC_LOCAL(uint64_t, u64Tmp);
2985 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2986 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2987 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2988 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2989 IEM_MC_ADVANCE_RIP();
2990 IEM_MC_END();
2991 }
2992 else
2993 {
2994 /*
2995 * Register, memory.
2996 */
2997 IEM_MC_BEGIN(0, 2);
2998 IEM_MC_LOCAL(uint64_t, u64Tmp);
2999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3000
3001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3003 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3004 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3005 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3006 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3007
3008 IEM_MC_ADVANCE_RIP();
3009 IEM_MC_END();
3010 }
3011 return VINF_SUCCESS;
3012}
3013
3014/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
3015FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3016{
3017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3018 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3019 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3020 {
3021 /*
3022 * Register, register.
3023 */
3024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3025 IEM_MC_BEGIN(0, 0);
3026 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3027 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3028 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3029 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3030 IEM_MC_ADVANCE_RIP();
3031 IEM_MC_END();
3032 }
3033 else
3034 {
3035 /*
3036 * Register, memory.
3037 */
3038 IEM_MC_BEGIN(0, 2);
3039 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3041
3042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3044 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3045 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3046 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3047 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3048
3049 IEM_MC_ADVANCE_RIP();
3050 IEM_MC_END();
3051 }
3052 return VINF_SUCCESS;
3053}
3054
3055/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
3056FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3057{
3058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3059 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3060 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3061 {
3062 /*
3063 * Register, register.
3064 */
3065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3066 IEM_MC_BEGIN(0, 0);
3067 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3068 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3069 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3070 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3071 IEM_MC_ADVANCE_RIP();
3072 IEM_MC_END();
3073 }
3074 else
3075 {
3076 /*
3077 * Register, memory.
3078 */
3079 IEM_MC_BEGIN(0, 2);
3080 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3082
3083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3085 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3086 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3087 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3088 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3089
3090 IEM_MC_ADVANCE_RIP();
3091 IEM_MC_END();
3092 }
3093 return VINF_SUCCESS;
3094}
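
/*
 * Editorial note: the only difference between the two forms above is the
 * alignment requirement - movdqa fetches via IEM_MC_FETCH_MEM_U128_ALIGN_SSE
 * and raises #GP(0) for a misaligned 16-byte operand, movdqu does not.  E.g.
 * "movdqa xmm0, [rsp+8]" faults whenever RSP itself is 16-byte aligned.
 */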
3095
3096
3097/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3098FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3099{
3100 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3102 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3103 {
3104 /*
3105 * Register, register.
3106 */
3107 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3109
3110 IEM_MC_BEGIN(3, 0);
3111 IEM_MC_ARG(uint64_t *, pDst, 0);
3112 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3113 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3114 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3115 IEM_MC_PREPARE_FPU_USAGE();
3116 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3117 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3118 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3119 IEM_MC_ADVANCE_RIP();
3120 IEM_MC_END();
3121 }
3122 else
3123 {
3124 /*
3125 * Register, memory.
3126 */
3127 IEM_MC_BEGIN(3, 2);
3128 IEM_MC_ARG(uint64_t *, pDst, 0);
3129 IEM_MC_LOCAL(uint64_t, uSrc);
3130 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3132
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 fetched below must be counted for RIP-relative addressing. */
3134 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3135 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3138
3139 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3140 IEM_MC_PREPARE_FPU_USAGE();
3141 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3142 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3143
3144 IEM_MC_ADVANCE_RIP();
3145 IEM_MC_END();
3146 }
3147 return VINF_SUCCESS;
3148}
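
/*
 * Worked example (editorial): the imm8 ("bEvil" above) packs four 2-bit
 * source word indexes, result.w[i] = src.w[(imm8 >> (i * 2)) & 3].  So
 * pshufw mm0, mm1, 0x1B (00 01 10 11b) reverses the four words, while 0x4E
 * (01 00 11 10b) swaps the two dwords.
 */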
3149
3150/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3151FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3152{
3153 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3156 {
3157 /*
3158 * Register, register.
3159 */
3160 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3162
3163 IEM_MC_BEGIN(3, 0);
3164 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3165 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3166 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3167 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3168 IEM_MC_PREPARE_SSE_USAGE();
3169 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3170 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3171 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3172 IEM_MC_ADVANCE_RIP();
3173 IEM_MC_END();
3174 }
3175 else
3176 {
3177 /*
3178 * Register, memory.
3179 */
3180 IEM_MC_BEGIN(3, 2);
3181 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3182 IEM_MC_LOCAL(RTUINT128U, uSrc);
3183 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3185
3186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3187 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3188 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3191
3192 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3193 IEM_MC_PREPARE_SSE_USAGE();
3194 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3195 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3196
3197 IEM_MC_ADVANCE_RIP();
3198 IEM_MC_END();
3199 }
3200 return VINF_SUCCESS;
3201}
3202
3203/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3204FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3205{
3206 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3215
3216 IEM_MC_BEGIN(3, 0);
3217 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3218 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3219 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3220 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3221 IEM_MC_PREPARE_SSE_USAGE();
3222 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3223 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3224 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3225 IEM_MC_ADVANCE_RIP();
3226 IEM_MC_END();
3227 }
3228 else
3229 {
3230 /*
3231 * Register, memory.
3232 */
3233 IEM_MC_BEGIN(3, 2);
3234 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3235 IEM_MC_LOCAL(RTUINT128U, uSrc);
3236 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3238
3239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3240 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3241 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3244
3245 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3246 IEM_MC_PREPARE_SSE_USAGE();
3247 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3248 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3249
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 return VINF_SUCCESS;
3254}
3255
3256/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3257FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3258{
3259 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3262 {
3263 /*
3264 * Register, register.
3265 */
3266 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3268
3269 IEM_MC_BEGIN(3, 0);
3270 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3271 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3272 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3273 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3274 IEM_MC_PREPARE_SSE_USAGE();
3275 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3276 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3277 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3278 IEM_MC_ADVANCE_RIP();
3279 IEM_MC_END();
3280 }
3281 else
3282 {
3283 /*
3284 * Register, memory.
3285 */
3286 IEM_MC_BEGIN(3, 2);
3287 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3288 IEM_MC_LOCAL(RTUINT128U, uSrc);
3289 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3291
3292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3293 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3294 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3296 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3297
3298 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3299 IEM_MC_PREPARE_SSE_USAGE();
3300 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3301 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3302
3303 IEM_MC_ADVANCE_RIP();
3304 IEM_MC_END();
3305 }
3306 return VINF_SUCCESS;
3307}
3308
3309
3310/** Opcode 0x0f 0x71 11/2. */
3311FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3312
3313/** Opcode 0x66 0x0f 0x71 11/2. */
3314FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3315
3316/** Opcode 0x0f 0x71 11/4. */
3317FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3318
3319/** Opcode 0x66 0x0f 0x71 11/4. */
3320FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3321
3322/** Opcode 0x0f 0x71 11/6. */
3323FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3324
3325/** Opcode 0x66 0x0f 0x71 11/6. */
3326FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3327
3328
3329/**
3330 * Group 12 jump table for register variant.
3331 */
3332IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3333{
3334 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3335 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3336 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3337 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3338 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3339 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3340 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3341 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3342};
3343AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3344
3345
3346/** Opcode 0x0f 0x71. */
3347FNIEMOP_DEF(iemOp_Grp12)
3348{
3349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3350 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3351 /* register, register */
3352 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3353 + pVCpu->iem.s.idxPrefix], bRm);
3354 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3355}
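
/*
 * Editorial note on the lookup above: each /reg row of the table holds four
 * entries, one per SIMD prefix state, so the index is reg * 4 + idxPrefix.
 * The column order can be read off the initializer: none, 0x66, 0xF3, 0xF2;
 * e.g. "66 0F 71 /2 ib" (vpsrlw) selects row 2, column 1.
 */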
3356
3357
3358/** Opcode 0x0f 0x72 11/2. */
3359FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3360
3361/** Opcode 0x66 0x0f 0x72 11/2. */
3362FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3363
3364/** Opcode 0x0f 0x72 11/4. */
3365FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3366
3367/** Opcode 0x66 0x0f 0x72 11/4. */
3368FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3369
3370/** Opcode 0x0f 0x72 11/6. */
3371FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3372
3373/** Opcode 0x66 0x0f 0x72 11/6. */
3374FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3375
3376
3377/**
3378 * Group 13 jump table for register variant.
3379 */
3380IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3381{
3382 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3383 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3384 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3385 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3386 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3387 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3388 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3389 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3390};
3391AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3392
3393/** Opcode 0x0f 0x72. */
3394FNIEMOP_DEF(iemOp_Grp13)
3395{
3396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3397 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3398 /* register, register */
3399 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3400 + pVCpu->iem.s.idxPrefix], bRm);
3401 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3402}
3403
3404
3405/** Opcode 0x0f 0x73 11/2. */
3406FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3407
3408/** Opcode 0x66 0x0f 0x73 11/2. */
3409FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3410
3411/** Opcode 0x66 0x0f 0x73 11/3. */
3412FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3413
3414/** Opcode 0x0f 0x73 11/6. */
3415FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3416
3417/** Opcode 0x66 0x0f 0x73 11/6. */
3418FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3419
3420/** Opcode 0x66 0x0f 0x73 11/7. */
3421FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
3422
3423/**
3424 * Group 14 jump table for register variant.
3425 */
3426IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3427{
3428 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3429 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3430 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3431 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3432 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3433 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3434 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3435 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3436};
3437AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3438
3439
3440/** Opcode 0x0f 0x73. */
3441FNIEMOP_DEF(iemOp_Grp14)
3442{
3443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3444 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3445 /* register, register */
3446 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3447 + pVCpu->iem.s.idxPrefix], bRm);
3448 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3449}
3450
3451
3452/**
3453 * Common worker for MMX instructions on the form:
3454 * pxxx mm1, mm2/mem64
3455 */
3456FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3457{
3458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3459 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3460 {
3461 /*
3462 * Register, register.
3463 */
3464 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3465 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3467 IEM_MC_BEGIN(2, 0);
3468 IEM_MC_ARG(uint64_t *, pDst, 0);
3469 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3470 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3471 IEM_MC_PREPARE_FPU_USAGE();
3472 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3473 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3474 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3475 IEM_MC_ADVANCE_RIP();
3476 IEM_MC_END();
3477 }
3478 else
3479 {
3480 /*
3481 * Register, memory.
3482 */
3483 IEM_MC_BEGIN(2, 2);
3484 IEM_MC_ARG(uint64_t *, pDst, 0);
3485 IEM_MC_LOCAL(uint64_t, uSrc);
3486 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3488
3489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3491 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3492 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3493
3494 IEM_MC_PREPARE_FPU_USAGE();
3495 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3496 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3497
3498 IEM_MC_ADVANCE_RIP();
3499 IEM_MC_END();
3500 }
3501 return VINF_SUCCESS;
3502}
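/*
 * Minimal sketch (illustration only, hypothetical name) of the kind of
 * helper IEM_MC_CALL_MMX_AIMPL_2 dispatches to through pImpl->pfnU64 above,
 * assuming the usual MMX A-impl contract of an FPU state pointer plus the
 * two operand pointers; the real workers live in the IEMAllAImpl* files.
 *
 *      static void iemAImpl_pand_u64_sketch(PCX86FXSTATE pFpuState,
 *                                           uint64_t *puDst, uint64_t const *puSrc)
 *      {
 *          *puDst &= *puSrc;   // PAND-style: fold the source into the destination
 *          RT_NOREF(pFpuState);
 *      }
 */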
3503
3504
3505/**
3506 * Common worker for SSE2 instructions of the form:
3507 * pxxx xmm1, xmm2/mem128
3508 *
3509 * Proper alignment of the 128-bit operand is enforced.
3510 * Exceptions type 4. SSE2 cpuid checks.
3511 */
3512FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3513{
3514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3515 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3516 {
3517 /*
3518 * Register, register.
3519 */
3520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3521 IEM_MC_BEGIN(2, 0);
3522 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3523 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3524 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3525 IEM_MC_PREPARE_SSE_USAGE();
3526 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3527 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3528 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3529 IEM_MC_ADVANCE_RIP();
3530 IEM_MC_END();
3531 }
3532 else
3533 {
3534 /*
3535 * Register, memory.
3536 */
3537 IEM_MC_BEGIN(2, 2);
3538 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3539 IEM_MC_LOCAL(RTUINT128U, uSrc);
3540 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3542
3543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3545 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3546 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3547
3548 IEM_MC_PREPARE_SSE_USAGE();
3549 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3550 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3551
3552 IEM_MC_ADVANCE_RIP();
3553 IEM_MC_END();
3554 }
3555 return VINF_SUCCESS;
3556}
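/*
 * Alignment illustration: unlike the MMX worker above, the memory path here
 * uses IEM_MC_FETCH_MEM_U128_ALIGN_SSE, so a 128-bit source that is not
 * 16-byte aligned raises #GP(0) instead of being fetched (the "exceptions
 * type 4" rule for legacy SSE).  Assumed example, with rsp 16-byte aligned:
 *
 *      pcmpeqb xmm0, [rsp]     ; aligned    -> 128-bit fetch
 *      pcmpeqb xmm0, [rsp+1]   ; misaligned -> #GP(0)
 */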
3557
3558
3559/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3560FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3561{
3562 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3563 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3564}
3565
3566/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3567FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3568{
3569 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3570 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3571}
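/*
 * Note: despite the vex-style handler names, the 0x66 variants here are
 * still decoded as the legacy SSE2 forms via iemOpCommonSse2_FullFull_To_Full
 * above; the names anticipate the VEX split of the opcode tables.
 */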
3572
3573/* Opcode 0xf3 0x0f 0x74 - invalid */
3574/* Opcode 0xf2 0x0f 0x74 - invalid */
3575
3576
3577/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3578FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3579{
3580 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3581 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3582}
3583
3584/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3585FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3586{
3587 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3588 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3589}
3590
3591/* Opcode 0xf3 0x0f 0x75 - invalid */
3592/* Opcode 0xf2 0x0f 0x75 - invalid */
3593
3594
3595/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3596FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3597{
3598 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3599 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3600}
3601
3602/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3603FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3604{
3605 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3606 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3607}
3608
3609/* Opcode 0xf3 0x0f 0x76 - invalid */
3610/* Opcode 0xf2 0x0f 0x76 - invalid */
3611
3612
3613/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3614FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3615/* Opcode 0x66 0x0f 0x77 - invalid */
3616/* Opcode 0xf3 0x0f 0x77 - invalid */
3617/* Opcode 0xf2 0x0f 0x77 - invalid */
3618
3619/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3620FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3621/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3622FNIEMOP_STUB(iemOp_AmdGrp17);
3623/* Opcode 0xf3 0x0f 0x78 - invalid */
3624/* Opcode 0xf2 0x0f 0x78 - invalid */
3625
3626/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3627FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3628/* Opcode 0x66 0x0f 0x79 - invalid */
3629/* Opcode 0xf3 0x0f 0x79 - invalid */
3630/* Opcode 0xf2 0x0f 0x79 - invalid */
3631
3632/* Opcode 0x0f 0x7a - invalid */
3633/* Opcode 0x66 0x0f 0x7a - invalid */
3634/* Opcode 0xf3 0x0f 0x7a - invalid */
3635/* Opcode 0xf2 0x0f 0x7a - invalid */
3636
3637/* Opcode 0x0f 0x7b - invalid */
3638/* Opcode 0x66 0x0f 0x7b - invalid */
3639/* Opcode 0xf3 0x0f 0x7b - invalid */
3640/* Opcode 0xf2 0x0f 0x7b - invalid */
3641
3642/* Opcode 0x0f 0x7c - invalid */
3643/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3644FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3645/* Opcode 0xf3 0x0f 0x7c - invalid */
3646/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3647FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3648
3649/* Opcode 0x0f 0x7d - invalid */
3650/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3651FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3652/* Opcode 0xf3 0x0f 0x7d - invalid */
3653/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3654FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3655
3656
3657/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3658FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3659{
3660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3661 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3662 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3663 else
3664 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3666 {
3667 /* greg, MMX */
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669 IEM_MC_BEGIN(0, 1);
3670 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3671 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3672 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3673 {
3674 IEM_MC_LOCAL(uint64_t, u64Tmp);
3675 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3676 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3677 }
3678 else
3679 {
3680 IEM_MC_LOCAL(uint32_t, u32Tmp);
3681 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3682 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3683 }
3684 IEM_MC_ADVANCE_RIP();
3685 IEM_MC_END();
3686 }
3687 else
3688 {
3689 /* [mem], MMX */
3690 IEM_MC_BEGIN(0, 2);
3691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3694 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3695 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3696 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3697 {
3698 IEM_MC_LOCAL(uint64_t, u64Tmp);
3699 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3700 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3701 }
3702 else
3703 {
3704 IEM_MC_LOCAL(uint32_t, u32Tmp);
3705 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3706 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3707 }
3708 IEM_MC_ADVANCE_RIP();
3709 IEM_MC_END();
3710 }
3711 return VINF_SUCCESS;
3712}
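/*
 * Illustration of the REX.W split above (byte values are assumptions):
 *
 *      0F 7E C8                ; movd eax, mm1 - 32-bit store, and
 *                              ; IEM_MC_STORE_GREG_U32 zeroes RAX[63:32]
 *      48 0F 7E C8             ; movq rax, mm1 - full 64-bit store
 */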
3713
3714/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
3715FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3716{
3717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3718 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3719 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3720 else
3721 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3722 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3723 {
3724 /* greg, XMM */
3725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3726 IEM_MC_BEGIN(0, 1);
3727 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3728 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3729 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3730 {
3731 IEM_MC_LOCAL(uint64_t, u64Tmp);
3732 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3733 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3734 }
3735 else
3736 {
3737 IEM_MC_LOCAL(uint32_t, u32Tmp);
3738 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3739 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3740 }
3741 IEM_MC_ADVANCE_RIP();
3742 IEM_MC_END();
3743 }
3744 else
3745 {
3746 /* [mem], XMM */
3747 IEM_MC_BEGIN(0, 2);
3748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3751 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3752 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3753 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3754 {
3755 IEM_MC_LOCAL(uint64_t, u64Tmp);
3756 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3757 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3758 }
3759 else
3760 {
3761 IEM_MC_LOCAL(uint32_t, u32Tmp);
3762 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3763 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3764 }
3765 IEM_MC_ADVANCE_RIP();
3766 IEM_MC_END();
3767 }
3768 return VINF_SUCCESS;
3769}
3770
3771/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3772FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3773/* Opcode 0xf2 0x0f 0x7e - invalid */
3774
3775
3776/** Opcode 0x0f 0x7f - movq Qq, Pq */
3777FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3778{
3779 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3781 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3782 {
3783 /*
3784 * Register, register.
3785 */
3786 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3787 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 IEM_MC_BEGIN(0, 1);
3790 IEM_MC_LOCAL(uint64_t, u64Tmp);
3791 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3792 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3793 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3794 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3795 IEM_MC_ADVANCE_RIP();
3796 IEM_MC_END();
3797 }
3798 else
3799 {
3800 /*
3801 * Register, memory.
3802 */
3803 IEM_MC_BEGIN(0, 2);
3804 IEM_MC_LOCAL(uint64_t, u64Tmp);
3805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3806
3807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3810 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3811
3812 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3813 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3814
3815 IEM_MC_ADVANCE_RIP();
3816 IEM_MC_END();
3817 }
3818 return VINF_SUCCESS;
3819}
3820
3821/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
3822FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3823{
3824 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
3825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3827 {
3828 /*
3829 * Register, register.
3830 */
3831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3832 IEM_MC_BEGIN(0, 0);
3833 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3834 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3835 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3836 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3837 IEM_MC_ADVANCE_RIP();
3838 IEM_MC_END();
3839 }
3840 else
3841 {
3842 /*
3843 * Register, memory.
3844 */
3845 IEM_MC_BEGIN(0, 2);
3846 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3848
3849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3851 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3852 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3853
3854 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3855 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3856
3857 IEM_MC_ADVANCE_RIP();
3858 IEM_MC_END();
3859 }
3860 return VINF_SUCCESS;
3861}
3862
3863/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
3864FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3865{
3866 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
3867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3869 {
3870 /*
3871 * Register, register.
3872 */
3873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3874 IEM_MC_BEGIN(0, 0);
3875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3876 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3877 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3878 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3879 IEM_MC_ADVANCE_RIP();
3880 IEM_MC_END();
3881 }
3882 else
3883 {
3884 /*
3885 * Register, memory.
3886 */
3887 IEM_MC_BEGIN(0, 2);
3888 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3890
3891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3894 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3895
3896 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3897 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3898
3899 IEM_MC_ADVANCE_RIP();
3900 IEM_MC_END();
3901 }
3902 return VINF_SUCCESS;
3903}
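/*
 * Contrast with the 0x66 variant above (illustrative): the decode is
 * identical, but the store differs - IEM_MC_STORE_MEM_U128 here accepts any
 * address, while IEM_MC_STORE_MEM_U128_ALIGN_SSE in the movdqa form raises
 * #GP(0) when the destination is not 16-byte aligned.  Assumed bytes:
 *
 *      66 0F 7F 00             ; movdqa [rax], xmm0 - rax must be 16-aligned
 *      F3 0F 7F 00             ; movdqu [rax], xmm0 - any rax is fine
 */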
3904
3905/* Opcode 0xf2 0x0f 0x7f - invalid */
3906
3907
3908
3909/** Opcode 0x0f 0x80. */
3910FNIEMOP_DEF(iemOp_jo_Jv)
3911{
3912 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3913 IEMOP_HLP_MIN_386();
3914 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3915 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3916 {
3917 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3919
3920 IEM_MC_BEGIN(0, 0);
3921 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3922 IEM_MC_REL_JMP_S16(i16Imm);
3923 } IEM_MC_ELSE() {
3924 IEM_MC_ADVANCE_RIP();
3925 } IEM_MC_ENDIF();
3926 IEM_MC_END();
3927 }
3928 else
3929 {
3930 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3932
3933 IEM_MC_BEGIN(0, 0);
3934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3935 IEM_MC_REL_JMP_S32(i32Imm);
3936 } IEM_MC_ELSE() {
3937 IEM_MC_ADVANCE_RIP();
3938 } IEM_MC_ENDIF();
3939 IEM_MC_END();
3940 }
3941 return VINF_SUCCESS;
3942}
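/*
 * Illustration for this and the following fifteen Jcc handlers: Jv is a
 * signed displacement relative to the end of the instruction, and
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE() makes the 32-bit immediate form the one
 * used in 64-bit mode.  Assumed example bytes:
 *
 *      0F 80 10 00 00 00       ; jo +0x10 -> RIP = RIP_next + 0x10 if OF=1
 */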
3943
3944
3945/** Opcode 0x0f 0x81. */
3946FNIEMOP_DEF(iemOp_jno_Jv)
3947{
3948 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3949 IEMOP_HLP_MIN_386();
3950 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3951 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3952 {
3953 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3955
3956 IEM_MC_BEGIN(0, 0);
3957 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3958 IEM_MC_ADVANCE_RIP();
3959 } IEM_MC_ELSE() {
3960 IEM_MC_REL_JMP_S16(i16Imm);
3961 } IEM_MC_ENDIF();
3962 IEM_MC_END();
3963 }
3964 else
3965 {
3966 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3968
3969 IEM_MC_BEGIN(0, 0);
3970 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3971 IEM_MC_ADVANCE_RIP();
3972 } IEM_MC_ELSE() {
3973 IEM_MC_REL_JMP_S32(i32Imm);
3974 } IEM_MC_ENDIF();
3975 IEM_MC_END();
3976 }
3977 return VINF_SUCCESS;
3978}
3979
3980
3981/** Opcode 0x0f 0x82. */
3982FNIEMOP_DEF(iemOp_jc_Jv)
3983{
3984 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3985 IEMOP_HLP_MIN_386();
3986 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3987 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3988 {
3989 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3991
3992 IEM_MC_BEGIN(0, 0);
3993 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3994 IEM_MC_REL_JMP_S16(i16Imm);
3995 } IEM_MC_ELSE() {
3996 IEM_MC_ADVANCE_RIP();
3997 } IEM_MC_ENDIF();
3998 IEM_MC_END();
3999 }
4000 else
4001 {
4002 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4004
4005 IEM_MC_BEGIN(0, 0);
4006 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4007 IEM_MC_REL_JMP_S32(i32Imm);
4008 } IEM_MC_ELSE() {
4009 IEM_MC_ADVANCE_RIP();
4010 } IEM_MC_ENDIF();
4011 IEM_MC_END();
4012 }
4013 return VINF_SUCCESS;
4014}
4015
4016
4017/** Opcode 0x0f 0x83. */
4018FNIEMOP_DEF(iemOp_jnc_Jv)
4019{
4020 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4021 IEMOP_HLP_MIN_386();
4022 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4023 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4024 {
4025 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4027
4028 IEM_MC_BEGIN(0, 0);
4029 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4030 IEM_MC_ADVANCE_RIP();
4031 } IEM_MC_ELSE() {
4032 IEM_MC_REL_JMP_S16(i16Imm);
4033 } IEM_MC_ENDIF();
4034 IEM_MC_END();
4035 }
4036 else
4037 {
4038 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4040
4041 IEM_MC_BEGIN(0, 0);
4042 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4043 IEM_MC_ADVANCE_RIP();
4044 } IEM_MC_ELSE() {
4045 IEM_MC_REL_JMP_S32(i32Imm);
4046 } IEM_MC_ENDIF();
4047 IEM_MC_END();
4048 }
4049 return VINF_SUCCESS;
4050}
4051
4052
4053/** Opcode 0x0f 0x84. */
4054FNIEMOP_DEF(iemOp_je_Jv)
4055{
4056 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4057 IEMOP_HLP_MIN_386();
4058 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4059 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4060 {
4061 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4063
4064 IEM_MC_BEGIN(0, 0);
4065 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4066 IEM_MC_REL_JMP_S16(i16Imm);
4067 } IEM_MC_ELSE() {
4068 IEM_MC_ADVANCE_RIP();
4069 } IEM_MC_ENDIF();
4070 IEM_MC_END();
4071 }
4072 else
4073 {
4074 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4076
4077 IEM_MC_BEGIN(0, 0);
4078 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4079 IEM_MC_REL_JMP_S32(i32Imm);
4080 } IEM_MC_ELSE() {
4081 IEM_MC_ADVANCE_RIP();
4082 } IEM_MC_ENDIF();
4083 IEM_MC_END();
4084 }
4085 return VINF_SUCCESS;
4086}
4087
4088
4089/** Opcode 0x0f 0x85. */
4090FNIEMOP_DEF(iemOp_jne_Jv)
4091{
4092 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4093 IEMOP_HLP_MIN_386();
4094 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4095 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4096 {
4097 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4099
4100 IEM_MC_BEGIN(0, 0);
4101 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4102 IEM_MC_ADVANCE_RIP();
4103 } IEM_MC_ELSE() {
4104 IEM_MC_REL_JMP_S16(i16Imm);
4105 } IEM_MC_ENDIF();
4106 IEM_MC_END();
4107 }
4108 else
4109 {
4110 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4112
4113 IEM_MC_BEGIN(0, 0);
4114 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4115 IEM_MC_ADVANCE_RIP();
4116 } IEM_MC_ELSE() {
4117 IEM_MC_REL_JMP_S32(i32Imm);
4118 } IEM_MC_ENDIF();
4119 IEM_MC_END();
4120 }
4121 return VINF_SUCCESS;
4122}
4123
4124
4125/** Opcode 0x0f 0x86. */
4126FNIEMOP_DEF(iemOp_jbe_Jv)
4127{
4128 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4129 IEMOP_HLP_MIN_386();
4130 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4131 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4132 {
4133 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4135
4136 IEM_MC_BEGIN(0, 0);
4137 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4138 IEM_MC_REL_JMP_S16(i16Imm);
4139 } IEM_MC_ELSE() {
4140 IEM_MC_ADVANCE_RIP();
4141 } IEM_MC_ENDIF();
4142 IEM_MC_END();
4143 }
4144 else
4145 {
4146 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4148
4149 IEM_MC_BEGIN(0, 0);
4150 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4151 IEM_MC_REL_JMP_S32(i32Imm);
4152 } IEM_MC_ELSE() {
4153 IEM_MC_ADVANCE_RIP();
4154 } IEM_MC_ENDIF();
4155 IEM_MC_END();
4156 }
4157 return VINF_SUCCESS;
4158}
4159
4160
4161/** Opcode 0x0f 0x87. */
4162FNIEMOP_DEF(iemOp_jnbe_Jv)
4163{
4164 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4165 IEMOP_HLP_MIN_386();
4166 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4167 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4168 {
4169 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4171
4172 IEM_MC_BEGIN(0, 0);
4173 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4174 IEM_MC_ADVANCE_RIP();
4175 } IEM_MC_ELSE() {
4176 IEM_MC_REL_JMP_S16(i16Imm);
4177 } IEM_MC_ENDIF();
4178 IEM_MC_END();
4179 }
4180 else
4181 {
4182 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4184
4185 IEM_MC_BEGIN(0, 0);
4186 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4187 IEM_MC_ADVANCE_RIP();
4188 } IEM_MC_ELSE() {
4189 IEM_MC_REL_JMP_S32(i32Imm);
4190 } IEM_MC_ENDIF();
4191 IEM_MC_END();
4192 }
4193 return VINF_SUCCESS;
4194}
4195
4196
4197/** Opcode 0x0f 0x88. */
4198FNIEMOP_DEF(iemOp_js_Jv)
4199{
4200 IEMOP_MNEMONIC(js_Jv, "js Jv");
4201 IEMOP_HLP_MIN_386();
4202 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4203 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4204 {
4205 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4207
4208 IEM_MC_BEGIN(0, 0);
4209 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4210 IEM_MC_REL_JMP_S16(i16Imm);
4211 } IEM_MC_ELSE() {
4212 IEM_MC_ADVANCE_RIP();
4213 } IEM_MC_ENDIF();
4214 IEM_MC_END();
4215 }
4216 else
4217 {
4218 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4220
4221 IEM_MC_BEGIN(0, 0);
4222 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4223 IEM_MC_REL_JMP_S32(i32Imm);
4224 } IEM_MC_ELSE() {
4225 IEM_MC_ADVANCE_RIP();
4226 } IEM_MC_ENDIF();
4227 IEM_MC_END();
4228 }
4229 return VINF_SUCCESS;
4230}
4231
4232
4233/** Opcode 0x0f 0x89. */
4234FNIEMOP_DEF(iemOp_jns_Jv)
4235{
4236 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4237 IEMOP_HLP_MIN_386();
4238 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4239 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4240 {
4241 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4243
4244 IEM_MC_BEGIN(0, 0);
4245 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4246 IEM_MC_ADVANCE_RIP();
4247 } IEM_MC_ELSE() {
4248 IEM_MC_REL_JMP_S16(i16Imm);
4249 } IEM_MC_ENDIF();
4250 IEM_MC_END();
4251 }
4252 else
4253 {
4254 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4256
4257 IEM_MC_BEGIN(0, 0);
4258 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4259 IEM_MC_ADVANCE_RIP();
4260 } IEM_MC_ELSE() {
4261 IEM_MC_REL_JMP_S32(i32Imm);
4262 } IEM_MC_ENDIF();
4263 IEM_MC_END();
4264 }
4265 return VINF_SUCCESS;
4266}
4267
4268
4269/** Opcode 0x0f 0x8a. */
4270FNIEMOP_DEF(iemOp_jp_Jv)
4271{
4272 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4273 IEMOP_HLP_MIN_386();
4274 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4275 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4276 {
4277 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4279
4280 IEM_MC_BEGIN(0, 0);
4281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4282 IEM_MC_REL_JMP_S16(i16Imm);
4283 } IEM_MC_ELSE() {
4284 IEM_MC_ADVANCE_RIP();
4285 } IEM_MC_ENDIF();
4286 IEM_MC_END();
4287 }
4288 else
4289 {
4290 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4292
4293 IEM_MC_BEGIN(0, 0);
4294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4295 IEM_MC_REL_JMP_S32(i32Imm);
4296 } IEM_MC_ELSE() {
4297 IEM_MC_ADVANCE_RIP();
4298 } IEM_MC_ENDIF();
4299 IEM_MC_END();
4300 }
4301 return VINF_SUCCESS;
4302}
4303
4304
4305/** Opcode 0x0f 0x8b. */
4306FNIEMOP_DEF(iemOp_jnp_Jv)
4307{
4308 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4309 IEMOP_HLP_MIN_386();
4310 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4311 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4312 {
4313 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4315
4316 IEM_MC_BEGIN(0, 0);
4317 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4318 IEM_MC_ADVANCE_RIP();
4319 } IEM_MC_ELSE() {
4320 IEM_MC_REL_JMP_S16(i16Imm);
4321 } IEM_MC_ENDIF();
4322 IEM_MC_END();
4323 }
4324 else
4325 {
4326 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4328
4329 IEM_MC_BEGIN(0, 0);
4330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4331 IEM_MC_ADVANCE_RIP();
4332 } IEM_MC_ELSE() {
4333 IEM_MC_REL_JMP_S32(i32Imm);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_END();
4336 }
4337 return VINF_SUCCESS;
4338}
4339
4340
4341/** Opcode 0x0f 0x8c. */
4342FNIEMOP_DEF(iemOp_jl_Jv)
4343{
4344 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4345 IEMOP_HLP_MIN_386();
4346 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4347 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4348 {
4349 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4351
4352 IEM_MC_BEGIN(0, 0);
4353 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4354 IEM_MC_REL_JMP_S16(i16Imm);
4355 } IEM_MC_ELSE() {
4356 IEM_MC_ADVANCE_RIP();
4357 } IEM_MC_ENDIF();
4358 IEM_MC_END();
4359 }
4360 else
4361 {
4362 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4364
4365 IEM_MC_BEGIN(0, 0);
4366 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4367 IEM_MC_REL_JMP_S32(i32Imm);
4368 } IEM_MC_ELSE() {
4369 IEM_MC_ADVANCE_RIP();
4370 } IEM_MC_ENDIF();
4371 IEM_MC_END();
4372 }
4373 return VINF_SUCCESS;
4374}
4375
4376
4377/** Opcode 0x0f 0x8d. */
4378FNIEMOP_DEF(iemOp_jnl_Jv)
4379{
4380 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4381 IEMOP_HLP_MIN_386();
4382 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4383 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4384 {
4385 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4387
4388 IEM_MC_BEGIN(0, 0);
4389 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4390 IEM_MC_ADVANCE_RIP();
4391 } IEM_MC_ELSE() {
4392 IEM_MC_REL_JMP_S16(i16Imm);
4393 } IEM_MC_ENDIF();
4394 IEM_MC_END();
4395 }
4396 else
4397 {
4398 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4400
4401 IEM_MC_BEGIN(0, 0);
4402 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4403 IEM_MC_ADVANCE_RIP();
4404 } IEM_MC_ELSE() {
4405 IEM_MC_REL_JMP_S32(i32Imm);
4406 } IEM_MC_ENDIF();
4407 IEM_MC_END();
4408 }
4409 return VINF_SUCCESS;
4410}
4411
4412
4413/** Opcode 0x0f 0x8e. */
4414FNIEMOP_DEF(iemOp_jle_Jv)
4415{
4416 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4417 IEMOP_HLP_MIN_386();
4418 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4419 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4420 {
4421 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4423
4424 IEM_MC_BEGIN(0, 0);
4425 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4426 IEM_MC_REL_JMP_S16(i16Imm);
4427 } IEM_MC_ELSE() {
4428 IEM_MC_ADVANCE_RIP();
4429 } IEM_MC_ENDIF();
4430 IEM_MC_END();
4431 }
4432 else
4433 {
4434 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4436
4437 IEM_MC_BEGIN(0, 0);
4438 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4439 IEM_MC_REL_JMP_S32(i32Imm);
4440 } IEM_MC_ELSE() {
4441 IEM_MC_ADVANCE_RIP();
4442 } IEM_MC_ENDIF();
4443 IEM_MC_END();
4444 }
4445 return VINF_SUCCESS;
4446}
4447
4448
4449/** Opcode 0x0f 0x8f. */
4450FNIEMOP_DEF(iemOp_jnle_Jv)
4451{
4452 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4453 IEMOP_HLP_MIN_386();
4454 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4455 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4456 {
4457 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459
4460 IEM_MC_BEGIN(0, 0);
4461 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4462 IEM_MC_ADVANCE_RIP();
4463 } IEM_MC_ELSE() {
4464 IEM_MC_REL_JMP_S16(i16Imm);
4465 } IEM_MC_ENDIF();
4466 IEM_MC_END();
4467 }
4468 else
4469 {
4470 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4472
4473 IEM_MC_BEGIN(0, 0);
4474 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4475 IEM_MC_ADVANCE_RIP();
4476 } IEM_MC_ELSE() {
4477 IEM_MC_REL_JMP_S32(i32Imm);
4478 } IEM_MC_ENDIF();
4479 IEM_MC_END();
4480 }
4481 return VINF_SUCCESS;
4482}
4483
4484
4485/** Opcode 0x0f 0x90. */
4486FNIEMOP_DEF(iemOp_seto_Eb)
4487{
4488 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4489 IEMOP_HLP_MIN_386();
4490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4491
4492 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4493 * any way. AMD says it's "unused", whatever that means. We're
4494 * ignoring for now. */
4495 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4496 {
4497 /* register target */
4498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4499 IEM_MC_BEGIN(0, 0);
4500 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4501 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4502 } IEM_MC_ELSE() {
4503 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4504 } IEM_MC_ENDIF();
4505 IEM_MC_ADVANCE_RIP();
4506 IEM_MC_END();
4507 }
4508 else
4509 {
4510 /* memory target */
4511 IEM_MC_BEGIN(0, 1);
4512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4515 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4516 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4517 } IEM_MC_ELSE() {
4518 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4519 } IEM_MC_ENDIF();
4520 IEM_MC_ADVANCE_RIP();
4521 IEM_MC_END();
4522 }
4523 return VINF_SUCCESS;
4524}
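/*
 * Illustration: the remaining 0x90..0x9f setcc handlers repeat this exact
 * register/memory split; only the EFLAGS predicate differs.  Assumed bytes:
 *
 *      0F 90 C0                ; seto al        -> al = OF ? 1 : 0
 *      0F 90 00                ; seto byte [rax] (memory target path)
 */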
4525
4526
4527/** Opcode 0x0f 0x91. */
4528FNIEMOP_DEF(iemOp_setno_Eb)
4529{
4530 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4531 IEMOP_HLP_MIN_386();
4532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4533
4534 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4535 * any way. AMD says it's "unused", whatever that means. We're
4536 * ignoring for now. */
4537 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4538 {
4539 /* register target */
4540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4541 IEM_MC_BEGIN(0, 0);
4542 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4543 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4544 } IEM_MC_ELSE() {
4545 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4546 } IEM_MC_ENDIF();
4547 IEM_MC_ADVANCE_RIP();
4548 IEM_MC_END();
4549 }
4550 else
4551 {
4552 /* memory target */
4553 IEM_MC_BEGIN(0, 1);
4554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4557 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4558 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4559 } IEM_MC_ELSE() {
4560 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4561 } IEM_MC_ENDIF();
4562 IEM_MC_ADVANCE_RIP();
4563 IEM_MC_END();
4564 }
4565 return VINF_SUCCESS;
4566}
4567
4568
4569/** Opcode 0x0f 0x92. */
4570FNIEMOP_DEF(iemOp_setc_Eb)
4571{
4572 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4573 IEMOP_HLP_MIN_386();
4574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4575
4576 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4577 * any way. AMD says it's "unused", whatever that means. We're
4578 * ignoring for now. */
4579 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4580 {
4581 /* register target */
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4583 IEM_MC_BEGIN(0, 0);
4584 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4585 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4586 } IEM_MC_ELSE() {
4587 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4588 } IEM_MC_ENDIF();
4589 IEM_MC_ADVANCE_RIP();
4590 IEM_MC_END();
4591 }
4592 else
4593 {
4594 /* memory target */
4595 IEM_MC_BEGIN(0, 1);
4596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4600 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4601 } IEM_MC_ELSE() {
4602 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4603 } IEM_MC_ENDIF();
4604 IEM_MC_ADVANCE_RIP();
4605 IEM_MC_END();
4606 }
4607 return VINF_SUCCESS;
4608}
4609
4610
4611/** Opcode 0x0f 0x93. */
4612FNIEMOP_DEF(iemOp_setnc_Eb)
4613{
4614 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4615 IEMOP_HLP_MIN_386();
4616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4617
4618 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4619 * any way. AMD says it's "unused", whatever that means. We're
4620 * ignoring for now. */
4621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4622 {
4623 /* register target */
4624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4625 IEM_MC_BEGIN(0, 0);
4626 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4627 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4628 } IEM_MC_ELSE() {
4629 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4630 } IEM_MC_ENDIF();
4631 IEM_MC_ADVANCE_RIP();
4632 IEM_MC_END();
4633 }
4634 else
4635 {
4636 /* memory target */
4637 IEM_MC_BEGIN(0, 1);
4638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4641 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4642 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4643 } IEM_MC_ELSE() {
4644 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4645 } IEM_MC_ENDIF();
4646 IEM_MC_ADVANCE_RIP();
4647 IEM_MC_END();
4648 }
4649 return VINF_SUCCESS;
4650}
4651
4652
4653/** Opcode 0x0f 0x94. */
4654FNIEMOP_DEF(iemOp_sete_Eb)
4655{
4656 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4657 IEMOP_HLP_MIN_386();
4658 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4659
4660 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4661 * any way. AMD says it's "unused", whatever that means. We're
4662 * ignoring for now. */
4663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4664 {
4665 /* register target */
4666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4667 IEM_MC_BEGIN(0, 0);
4668 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4669 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4670 } IEM_MC_ELSE() {
4671 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4672 } IEM_MC_ENDIF();
4673 IEM_MC_ADVANCE_RIP();
4674 IEM_MC_END();
4675 }
4676 else
4677 {
4678 /* memory target */
4679 IEM_MC_BEGIN(0, 1);
4680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4684 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4685 } IEM_MC_ELSE() {
4686 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4687 } IEM_MC_ENDIF();
4688 IEM_MC_ADVANCE_RIP();
4689 IEM_MC_END();
4690 }
4691 return VINF_SUCCESS;
4692}
4693
4694
4695/** Opcode 0x0f 0x95. */
4696FNIEMOP_DEF(iemOp_setne_Eb)
4697{
4698 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4699 IEMOP_HLP_MIN_386();
4700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4701
4702 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4703 * any way. AMD says it's "unused", whatever that means. We're
4704 * ignoring for now. */
4705 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4706 {
4707 /* register target */
4708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4709 IEM_MC_BEGIN(0, 0);
4710 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4711 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4712 } IEM_MC_ELSE() {
4713 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4714 } IEM_MC_ENDIF();
4715 IEM_MC_ADVANCE_RIP();
4716 IEM_MC_END();
4717 }
4718 else
4719 {
4720 /* memory target */
4721 IEM_MC_BEGIN(0, 1);
4722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4726 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4727 } IEM_MC_ELSE() {
4728 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4729 } IEM_MC_ENDIF();
4730 IEM_MC_ADVANCE_RIP();
4731 IEM_MC_END();
4732 }
4733 return VINF_SUCCESS;
4734}
4735
4736
4737/** Opcode 0x0f 0x96. */
4738FNIEMOP_DEF(iemOp_setbe_Eb)
4739{
4740 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4741 IEMOP_HLP_MIN_386();
4742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4743
4744 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4745 * any way. AMD says it's "unused", whatever that means. We're
4746 * ignoring for now. */
4747 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4748 {
4749 /* register target */
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4751 IEM_MC_BEGIN(0, 0);
4752 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4753 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4754 } IEM_MC_ELSE() {
4755 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4756 } IEM_MC_ENDIF();
4757 IEM_MC_ADVANCE_RIP();
4758 IEM_MC_END();
4759 }
4760 else
4761 {
4762 /* memory target */
4763 IEM_MC_BEGIN(0, 1);
4764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4767 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4768 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4769 } IEM_MC_ELSE() {
4770 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4771 } IEM_MC_ENDIF();
4772 IEM_MC_ADVANCE_RIP();
4773 IEM_MC_END();
4774 }
4775 return VINF_SUCCESS;
4776}
4777
4778
4779/** Opcode 0x0f 0x97. */
4780FNIEMOP_DEF(iemOp_setnbe_Eb)
4781{
4782 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4783 IEMOP_HLP_MIN_386();
4784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4785
4786 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4787 * any way. AMD says it's "unused", whatever that means. We're
4788 * ignoring for now. */
4789 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4790 {
4791 /* register target */
4792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4793 IEM_MC_BEGIN(0, 0);
4794 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4795 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4796 } IEM_MC_ELSE() {
4797 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4798 } IEM_MC_ENDIF();
4799 IEM_MC_ADVANCE_RIP();
4800 IEM_MC_END();
4801 }
4802 else
4803 {
4804 /* memory target */
4805 IEM_MC_BEGIN(0, 1);
4806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4809 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4810 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4811 } IEM_MC_ELSE() {
4812 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4813 } IEM_MC_ENDIF();
4814 IEM_MC_ADVANCE_RIP();
4815 IEM_MC_END();
4816 }
4817 return VINF_SUCCESS;
4818}
4819
4820
4821/** Opcode 0x0f 0x98. */
4822FNIEMOP_DEF(iemOp_sets_Eb)
4823{
4824 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4825 IEMOP_HLP_MIN_386();
4826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4827
4828 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4829 * any way. AMD says it's "unused", whatever that means. We're
4830 * ignoring for now. */
4831 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4832 {
4833 /* register target */
4834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4835 IEM_MC_BEGIN(0, 0);
4836 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4837 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4838 } IEM_MC_ELSE() {
4839 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4840 } IEM_MC_ENDIF();
4841 IEM_MC_ADVANCE_RIP();
4842 IEM_MC_END();
4843 }
4844 else
4845 {
4846 /* memory target */
4847 IEM_MC_BEGIN(0, 1);
4848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4852 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4853 } IEM_MC_ELSE() {
4854 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4855 } IEM_MC_ENDIF();
4856 IEM_MC_ADVANCE_RIP();
4857 IEM_MC_END();
4858 }
4859 return VINF_SUCCESS;
4860}
4861
4862
4863/** Opcode 0x0f 0x99. */
4864FNIEMOP_DEF(iemOp_setns_Eb)
4865{
4866 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4867 IEMOP_HLP_MIN_386();
4868 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4869
4870 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4871 * any way. AMD says it's "unused", whatever that means. We're
4872 * ignoring for now. */
4873 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4874 {
4875 /* register target */
4876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4877 IEM_MC_BEGIN(0, 0);
4878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4879 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4880 } IEM_MC_ELSE() {
4881 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4882 } IEM_MC_ENDIF();
4883 IEM_MC_ADVANCE_RIP();
4884 IEM_MC_END();
4885 }
4886 else
4887 {
4888 /* memory target */
4889 IEM_MC_BEGIN(0, 1);
4890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4893 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4894 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4895 } IEM_MC_ELSE() {
4896 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4897 } IEM_MC_ENDIF();
4898 IEM_MC_ADVANCE_RIP();
4899 IEM_MC_END();
4900 }
4901 return VINF_SUCCESS;
4902}
4903
4904
4905/** Opcode 0x0f 0x9a. */
4906FNIEMOP_DEF(iemOp_setp_Eb)
4907{
4908 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4909 IEMOP_HLP_MIN_386();
4910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4911
4912 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4913 * any way. AMD says it's "unused", whatever that means. We're
4914 * ignoring for now. */
4915 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4916 {
4917 /* register target */
4918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4919 IEM_MC_BEGIN(0, 0);
4920 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4921 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4922 } IEM_MC_ELSE() {
4923 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4924 } IEM_MC_ENDIF();
4925 IEM_MC_ADVANCE_RIP();
4926 IEM_MC_END();
4927 }
4928 else
4929 {
4930 /* memory target */
4931 IEM_MC_BEGIN(0, 1);
4932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4936 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4937 } IEM_MC_ELSE() {
4938 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4939 } IEM_MC_ENDIF();
4940 IEM_MC_ADVANCE_RIP();
4941 IEM_MC_END();
4942 }
4943 return VINF_SUCCESS;
4944}
4945
4946
4947/** Opcode 0x0f 0x9b. */
4948FNIEMOP_DEF(iemOp_setnp_Eb)
4949{
4950 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4951 IEMOP_HLP_MIN_386();
4952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4953
4954 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4955 * any way. AMD says it's "unused", whatever that means. We're
4956 * ignoring for now. */
4957 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4958 {
4959 /* register target */
4960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4961 IEM_MC_BEGIN(0, 0);
4962 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4963 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4964 } IEM_MC_ELSE() {
4965 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4966 } IEM_MC_ENDIF();
4967 IEM_MC_ADVANCE_RIP();
4968 IEM_MC_END();
4969 }
4970 else
4971 {
4972 /* memory target */
4973 IEM_MC_BEGIN(0, 1);
4974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4977 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4978 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4979 } IEM_MC_ELSE() {
4980 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4981 } IEM_MC_ENDIF();
4982 IEM_MC_ADVANCE_RIP();
4983 IEM_MC_END();
4984 }
4985 return VINF_SUCCESS;
4986}
4987
4988
4989/** Opcode 0x0f 0x9c. */
4990FNIEMOP_DEF(iemOp_setl_Eb)
4991{
4992 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4993 IEMOP_HLP_MIN_386();
4994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4995
4996 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4997 * any way. AMD says it's "unused", whatever that means. We're
4998 * ignoring for now. */
4999 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5000 {
5001 /* register target */
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5003 IEM_MC_BEGIN(0, 0);
5004 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5005 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5006 } IEM_MC_ELSE() {
5007 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5008 } IEM_MC_ENDIF();
5009 IEM_MC_ADVANCE_RIP();
5010 IEM_MC_END();
5011 }
5012 else
5013 {
5014 /* memory target */
5015 IEM_MC_BEGIN(0, 1);
5016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5020 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5021 } IEM_MC_ELSE() {
5022 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5023 } IEM_MC_ENDIF();
5024 IEM_MC_ADVANCE_RIP();
5025 IEM_MC_END();
5026 }
5027 return VINF_SUCCESS;
5028}
5029
5030
5031/** Opcode 0x0f 0x9d. */
5032FNIEMOP_DEF(iemOp_setnl_Eb)
5033{
5034 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5035 IEMOP_HLP_MIN_386();
5036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5037
5038 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5039 * any way. AMD says it's "unused", whatever that means. We're
5040 * ignoring for now. */
5041 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5042 {
5043 /* register target */
5044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5045 IEM_MC_BEGIN(0, 0);
5046 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5047 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5048 } IEM_MC_ELSE() {
5049 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5050 } IEM_MC_ENDIF();
5051 IEM_MC_ADVANCE_RIP();
5052 IEM_MC_END();
5053 }
5054 else
5055 {
5056 /* memory target */
5057 IEM_MC_BEGIN(0, 1);
5058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5061 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5062 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5063 } IEM_MC_ELSE() {
5064 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5065 } IEM_MC_ENDIF();
5066 IEM_MC_ADVANCE_RIP();
5067 IEM_MC_END();
5068 }
5069 return VINF_SUCCESS;
5070}
5071
5072
5073/** Opcode 0x0f 0x9e. */
5074FNIEMOP_DEF(iemOp_setle_Eb)
5075{
5076 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5077 IEMOP_HLP_MIN_386();
5078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5079
5080 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5081 * any way. AMD says it's "unused", whatever that means. We're
5082 * ignoring for now. */
5083 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5084 {
5085 /* register target */
5086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5087 IEM_MC_BEGIN(0, 0);
5088 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5089 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5090 } IEM_MC_ELSE() {
5091 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5092 } IEM_MC_ENDIF();
5093 IEM_MC_ADVANCE_RIP();
5094 IEM_MC_END();
5095 }
5096 else
5097 {
5098 /* memory target */
5099 IEM_MC_BEGIN(0, 1);
5100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5104 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5105 } IEM_MC_ELSE() {
5106 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5107 } IEM_MC_ENDIF();
5108 IEM_MC_ADVANCE_RIP();
5109 IEM_MC_END();
5110 }
5111 return VINF_SUCCESS;
5112}
5113
5114
5115/** Opcode 0x0f 0x9f. */
5116FNIEMOP_DEF(iemOp_setnle_Eb)
5117{
5118 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5119 IEMOP_HLP_MIN_386();
5120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5121
5122 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5123 * any way. AMD says it's "unused", whatever that means. We're
5124 * ignoring for now. */
5125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5126 {
5127 /* register target */
5128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5129 IEM_MC_BEGIN(0, 0);
5130 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5131 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5132 } IEM_MC_ELSE() {
5133 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5134 } IEM_MC_ENDIF();
5135 IEM_MC_ADVANCE_RIP();
5136 IEM_MC_END();
5137 }
5138 else
5139 {
5140 /* memory target */
5141 IEM_MC_BEGIN(0, 1);
5142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5145 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5146 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5147 } IEM_MC_ELSE() {
5148 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5149 } IEM_MC_ENDIF();
5150 IEM_MC_ADVANCE_RIP();
5151 IEM_MC_END();
5152 }
5153 return VINF_SUCCESS;
5154}
5155
5156
5157/**
5158 * Common 'push segment-register' helper.
5159 */
5160FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5161{
5162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5163 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5164 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5165
5166 switch (pVCpu->iem.s.enmEffOpSize)
5167 {
5168 case IEMMODE_16BIT:
5169 IEM_MC_BEGIN(0, 1);
5170 IEM_MC_LOCAL(uint16_t, u16Value);
5171 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5172 IEM_MC_PUSH_U16(u16Value);
5173 IEM_MC_ADVANCE_RIP();
5174 IEM_MC_END();
5175 break;
5176
5177 case IEMMODE_32BIT:
5178 IEM_MC_BEGIN(0, 1);
5179 IEM_MC_LOCAL(uint32_t, u32Value);
5180 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5181 IEM_MC_PUSH_U32_SREG(u32Value);
5182 IEM_MC_ADVANCE_RIP();
5183 IEM_MC_END();
5184 break;
5185
5186 case IEMMODE_64BIT:
5187 IEM_MC_BEGIN(0, 1);
5188 IEM_MC_LOCAL(uint64_t, u64Value);
5189 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5190 IEM_MC_PUSH_U64(u64Value);
5191 IEM_MC_ADVANCE_RIP();
5192 IEM_MC_END();
5193 break;
5194 }
5195
5196 return VINF_SUCCESS;
5197}
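/*
 * Usage sketch (illustrative): 0x0f 0xa0/0xa8 funnel into this worker, e.g.
 * FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS).  The 32-bit case uses
 * IEM_MC_PUSH_U32_SREG rather than plain IEM_MC_PUSH_U32 because some CPUs
 * write only the low 16 bits of the 32-bit stack slot on a segment-register
 * push; exactly which models do is not verified here.
 */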
5198
5199
5200/** Opcode 0x0f 0xa0. */
5201FNIEMOP_DEF(iemOp_push_fs)
5202{
5203 IEMOP_MNEMONIC(push_fs, "push fs");
5204 IEMOP_HLP_MIN_386();
5205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5206 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5207}
5208
5209
5210/** Opcode 0x0f 0xa1. */
5211FNIEMOP_DEF(iemOp_pop_fs)
5212{
5213 IEMOP_MNEMONIC(pop_fs, "pop fs");
5214 IEMOP_HLP_MIN_386();
5215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5217}
5218
5219
5220/** Opcode 0x0f 0xa2. */
5221FNIEMOP_DEF(iemOp_cpuid)
5222{
5223 IEMOP_MNEMONIC(cpuid, "cpuid");
5224 IEMOP_HLP_MIN_486(); /* not all 486es. */
5225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5226 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5227}
5228
5229
5230/**
5231 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5232 * iemOp_bts_Ev_Gv.
5233 */
5234FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5235{
5236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5237 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5238
5239 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5240 {
5241 /* register destination. */
5242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5243 switch (pVCpu->iem.s.enmEffOpSize)
5244 {
5245 case IEMMODE_16BIT:
5246 IEM_MC_BEGIN(3, 0);
5247 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5248 IEM_MC_ARG(uint16_t, u16Src, 1);
5249 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5250
5251 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5252 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5253 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5254 IEM_MC_REF_EFLAGS(pEFlags);
5255 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5256
5257 IEM_MC_ADVANCE_RIP();
5258 IEM_MC_END();
5259 return VINF_SUCCESS;
5260
5261 case IEMMODE_32BIT:
5262 IEM_MC_BEGIN(3, 0);
5263 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5264 IEM_MC_ARG(uint32_t, u32Src, 1);
5265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5266
5267 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5268 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5269 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5270 IEM_MC_REF_EFLAGS(pEFlags);
5271 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5272
5273 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5274 IEM_MC_ADVANCE_RIP();
5275 IEM_MC_END();
5276 return VINF_SUCCESS;
5277
5278 case IEMMODE_64BIT:
5279 IEM_MC_BEGIN(3, 0);
5280 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5281 IEM_MC_ARG(uint64_t, u64Src, 1);
5282 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5283
5284 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5285 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5286 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5287 IEM_MC_REF_EFLAGS(pEFlags);
5288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5289
5290 IEM_MC_ADVANCE_RIP();
5291 IEM_MC_END();
5292 return VINF_SUCCESS;
5293
5294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5295 }
5296 }
5297 else
5298 {
5299 /* memory destination. */
5300
5301 uint32_t fAccess;
5302 if (pImpl->pfnLockedU16)
5303 fAccess = IEM_ACCESS_DATA_RW;
5304 else /* BT */
5305 fAccess = IEM_ACCESS_DATA_R;
5306
5307 /** @todo test negative bit offsets! */
5308 switch (pVCpu->iem.s.enmEffOpSize)
5309 {
5310 case IEMMODE_16BIT:
5311 IEM_MC_BEGIN(3, 2);
5312 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5313 IEM_MC_ARG(uint16_t, u16Src, 1);
5314 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5316 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5317
5318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5319 if (pImpl->pfnLockedU16)
5320 IEMOP_HLP_DONE_DECODING();
5321 else
5322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5323 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
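                /* Same scheme for dwords: offset >> 5 selects the dword, << 2 scales to bytes. */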
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
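                /* And for qwords: offset >> 6 selects the qword, << 3 scales to bytes. */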
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

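                /* Note: the third argument to IEM_MC_CALC_RM_EFF_ADDR is the number
                   of immediate opcode bytes still to be fetched (the imm8 shift
                   count here), which is needed to get RIP-relative addressing
                   right in 64-bit mode. */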
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();


/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/2
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest op1=0 -> mxcsr=0
 * @optest op1=0x2083 -> mxcsr=0x2083
 * @optest op1=0xfffffffe -> value.xcpt=0xd
 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps vexgrp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
 *         -> value.xcpt=0x6
 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
 *          doesn't seem to check XCR0[2:1] != 11b. This does not match the
 *          APMv4 rev 3.17 page 509.
 * @todo Test this instruction on AMD Ryzen.
 */
FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
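    /* VEX encoding constraints: VEX.L must be zero and VEX.vvvv must be 1111b
       (no extra register operand), otherwise #UD - which the next helper
       enforces. */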
    IEMOP_HLP_DONE_VEX_DECODING_L_ZERO_NO_VVV();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/4
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * @opmaps grp15
 * @opcode !11/5
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx none
 * @opcpuid clfsh
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx 0x66
 * @opcpuid clflushopt
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
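    /* Use the host's real LFENCE instruction when it is available; otherwise
       fall back on an alternative memory fence implementation. */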
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);


/**
 * Group 15 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{ /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);


/**
 * Group 15 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{ /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);


/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
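    /* Dispatch on the mod R/M reg field and the mandatory prefix: each /r row
       of the tables above holds four entries indexed by prefix (none, 066h,
       0f3h, 0f2h). */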
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}


/**
 * Vex group 15 jump table for register variant.
 * @todo work in progress
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15RegReg[] =
{ /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup15RegReg) == 8*4);


/**
 * Vex group 15 jump table for memory variant.
 * @todo work in progress
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
{ /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);


/** Opcode vex. 0xae. */
FNIEMOP_DEF(iemOp_VGrp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnVexGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                     + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
}


/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}


/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
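        /* CMPXCHG loads the accumulator with the destination's old value on a
           compare miss, so the (possibly updated) local copy must be written
           back to AL unconditionally. */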
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
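                /* On 32-bit hosts the 64-bit source operand is passed to the
                   assembly helper by reference, on 64-bit hosts by value. */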
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

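    /* lss/lfs/lgs load a far pointer from memory: the offset comes first, then
       the 16-bit selector at displacement 2, 4 or 8, depending on the effective
       operand size. */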
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);


/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD stops decoding at the 0xb9 opcode byte, whereas Intel decodes the
     * modr/m byte as well. See bs3-cpu-decoder-1.c32. So, we can forward to
     * iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

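    /* For the Ib forms the bit index is masked to the operand width up front
       (0x0f/0x1f/0x3f below), so unlike the Gv forms no effective address
       adjustment is needed for memory destinations. */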
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6989 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6990}
6991
6992
6993/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6994FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6995
6996
6997/** Opcode 0x0f 0xbd. */
6998FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6999{
7000 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7001 IEMOP_HLP_MIN_386();
7002 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7003 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7004}
7005
7006
7007/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7008FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7009
7010
7011/** Opcode 0x0f 0xbe. */
7012FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7013{
7014 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7015 IEMOP_HLP_MIN_386();
7016
7017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7018
7019 /*
7020 * If rm is denoting a register, no more instruction bytes.
7021 */
7022 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7023 {
7024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7025 switch (pVCpu->iem.s.enmEffOpSize)
7026 {
7027 case IEMMODE_16BIT:
7028 IEM_MC_BEGIN(0, 1);
7029 IEM_MC_LOCAL(uint16_t, u16Value);
7030 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7031 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7032 IEM_MC_ADVANCE_RIP();
7033 IEM_MC_END();
7034 return VINF_SUCCESS;
7035
7036 case IEMMODE_32BIT:
7037 IEM_MC_BEGIN(0, 1);
7038 IEM_MC_LOCAL(uint32_t, u32Value);
7039 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7040 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7041 IEM_MC_ADVANCE_RIP();
7042 IEM_MC_END();
7043 return VINF_SUCCESS;
7044
7045 case IEMMODE_64BIT:
7046 IEM_MC_BEGIN(0, 1);
7047 IEM_MC_LOCAL(uint64_t, u64Value);
7048 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7049 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7050 IEM_MC_ADVANCE_RIP();
7051 IEM_MC_END();
7052 return VINF_SUCCESS;
7053
7054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7055 }
7056 }
7057 else
7058 {
7059 /*
7060 * We're loading a register from memory.
7061 */
7062 switch (pVCpu->iem.s.enmEffOpSize)
7063 {
7064 case IEMMODE_16BIT:
7065 IEM_MC_BEGIN(0, 2);
7066 IEM_MC_LOCAL(uint16_t, u16Value);
7067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7070 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7071 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7072 IEM_MC_ADVANCE_RIP();
7073 IEM_MC_END();
7074 return VINF_SUCCESS;
7075
7076 case IEMMODE_32BIT:
7077 IEM_MC_BEGIN(0, 2);
7078 IEM_MC_LOCAL(uint32_t, u32Value);
7079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7082 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7083 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7084 IEM_MC_ADVANCE_RIP();
7085 IEM_MC_END();
7086 return VINF_SUCCESS;
7087
7088 case IEMMODE_64BIT:
7089 IEM_MC_BEGIN(0, 2);
7090 IEM_MC_LOCAL(uint64_t, u64Value);
7091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7094 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7095 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7096 IEM_MC_ADVANCE_RIP();
7097 IEM_MC_END();
7098 return VINF_SUCCESS;
7099
7100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7101 }
7102 }
7103}
7104
7105
7106/** Opcode 0x0f 0xbf. */
7107FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7108{
7109 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7110 IEMOP_HLP_MIN_386();
7111
7112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7113
7114 /** @todo Not entirely sure how the operand size prefix is handled here,
7115 * assuming that it will be ignored. Would be nice to have a few
7116 * tests for this. */
7117 /*
7118 * If rm is denoting a register, no more instruction bytes.
7119 */
7120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7121 {
7122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7123 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7124 {
7125 IEM_MC_BEGIN(0, 1);
7126 IEM_MC_LOCAL(uint32_t, u32Value);
7127 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7128 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7129 IEM_MC_ADVANCE_RIP();
7130 IEM_MC_END();
7131 }
7132 else
7133 {
7134 IEM_MC_BEGIN(0, 1);
7135 IEM_MC_LOCAL(uint64_t, u64Value);
7136 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7137 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7138 IEM_MC_ADVANCE_RIP();
7139 IEM_MC_END();
7140 }
7141 }
7142 else
7143 {
7144 /*
7145 * We're loading a register from memory.
7146 */
7147 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7148 {
7149 IEM_MC_BEGIN(0, 2);
7150 IEM_MC_LOCAL(uint32_t, u32Value);
7151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7154 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7155 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7156 IEM_MC_ADVANCE_RIP();
7157 IEM_MC_END();
7158 }
7159 else
7160 {
7161 IEM_MC_BEGIN(0, 2);
7162 IEM_MC_LOCAL(uint64_t, u64Value);
7163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7166 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7167 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7168 IEM_MC_ADVANCE_RIP();
7169 IEM_MC_END();
7170 }
7171 }
7172 return VINF_SUCCESS;
7173}
7174
7175
7176/** Opcode 0x0f 0xc0. */
7177FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7178{
7179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7180 IEMOP_HLP_MIN_486();
7181 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
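    /* Note: XADD works as tmp = *pDst; *pDst += *pReg; *pReg = tmp, i.e. the
       source register receives the original destination value, which is why
       the memory variant below keeps a register copy around. */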
7182
7183 /*
7184 * If rm is denoting a register, no more instruction bytes.
7185 */
7186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7187 {
7188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7189
7190 IEM_MC_BEGIN(3, 0);
7191 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7192 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7194
7195 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7196 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7197 IEM_MC_REF_EFLAGS(pEFlags);
7198 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7199
7200 IEM_MC_ADVANCE_RIP();
7201 IEM_MC_END();
7202 }
7203 else
7204 {
7205 /*
7206 * We're accessing memory.
7207 */
7208 IEM_MC_BEGIN(3, 3);
7209 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7210 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7211 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7212 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7214
7215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7216 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7217 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7218 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7219 IEM_MC_FETCH_EFLAGS(EFlags);
7220 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7221 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7222 else
7223 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7224
7225 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7226 IEM_MC_COMMIT_EFLAGS(EFlags);
7227 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7228 IEM_MC_ADVANCE_RIP();
7229 IEM_MC_END();
7230 return VINF_SUCCESS;
7231 }
7232 return VINF_SUCCESS;
7233}
7234
7235
7236/** Opcode 0x0f 0xc1. */
7237FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7238{
7239 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7240 IEMOP_HLP_MIN_486();
7241 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7242
7243 /*
7244 * If rm is denoting a register, no more instruction bytes.
7245 */
7246 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7247 {
7248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7249
7250 switch (pVCpu->iem.s.enmEffOpSize)
7251 {
7252 case IEMMODE_16BIT:
7253 IEM_MC_BEGIN(3, 0);
7254 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7255 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7256 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7257
7258 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7259 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7260 IEM_MC_REF_EFLAGS(pEFlags);
7261 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7262
7263 IEM_MC_ADVANCE_RIP();
7264 IEM_MC_END();
7265 return VINF_SUCCESS;
7266
7267 case IEMMODE_32BIT:
7268 IEM_MC_BEGIN(3, 0);
7269 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7270 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7272
7273 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7274 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7275 IEM_MC_REF_EFLAGS(pEFlags);
7276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7277
7278 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7279 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7280 IEM_MC_ADVANCE_RIP();
7281 IEM_MC_END();
7282 return VINF_SUCCESS;
7283
7284 case IEMMODE_64BIT:
7285 IEM_MC_BEGIN(3, 0);
7286 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7287 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7288 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7289
7290 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7291 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7292 IEM_MC_REF_EFLAGS(pEFlags);
7293 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7294
7295 IEM_MC_ADVANCE_RIP();
7296 IEM_MC_END();
7297 return VINF_SUCCESS;
7298
7299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7300 }
7301 }
7302 else
7303 {
7304 /*
7305 * We're accessing memory.
7306 */
7307 switch (pVCpu->iem.s.enmEffOpSize)
7308 {
7309 case IEMMODE_16BIT:
7310 IEM_MC_BEGIN(3, 3);
7311 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7312 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7313 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7314 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7316
7317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7318 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7319 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7320 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7321 IEM_MC_FETCH_EFLAGS(EFlags);
7322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7323 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7324 else
7325 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7326
7327 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7328 IEM_MC_COMMIT_EFLAGS(EFlags);
7329 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7330 IEM_MC_ADVANCE_RIP();
7331 IEM_MC_END();
7332 return VINF_SUCCESS;
7333
7334 case IEMMODE_32BIT:
7335 IEM_MC_BEGIN(3, 3);
7336 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7337 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7338 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7339 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7341
7342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7343 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7344 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7345 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7346 IEM_MC_FETCH_EFLAGS(EFlags);
7347 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7348 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7349 else
7350 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7351
7352 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7353 IEM_MC_COMMIT_EFLAGS(EFlags);
7354 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7355 IEM_MC_ADVANCE_RIP();
7356 IEM_MC_END();
7357 return VINF_SUCCESS;
7358
7359 case IEMMODE_64BIT:
7360 IEM_MC_BEGIN(3, 3);
7361 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7362 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7363 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7364 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7366
7367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7368 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7369 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7370 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7371 IEM_MC_FETCH_EFLAGS(EFlags);
7372 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7373 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7374 else
7375 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7376
7377 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7378 IEM_MC_COMMIT_EFLAGS(EFlags);
7379 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7380 IEM_MC_ADVANCE_RIP();
7381 IEM_MC_END();
7382 return VINF_SUCCESS;
7383
7384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7385 }
7386 }
7387}
7388
7389
7390/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7391FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7392/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7393FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7394/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7395FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7396/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7397FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7398
7399
7400/** Opcode 0x0f 0xc3. */
7401FNIEMOP_DEF(iemOp_movnti_My_Gy)
7402{
7403 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
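    /* Note: MOVNTI is an SSE2 non-temporal store hint; since the hint only
       affects caching behaviour, the emulation below does a plain store. */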
7404
7405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7406
7407 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7408 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7409 {
7410 switch (pVCpu->iem.s.enmEffOpSize)
7411 {
7412 case IEMMODE_32BIT:
7413 IEM_MC_BEGIN(0, 2);
7414 IEM_MC_LOCAL(uint32_t, u32Value);
7415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7416
7417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7419 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7420 return IEMOP_RAISE_INVALID_OPCODE();
7421
7422 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7423 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7424 IEM_MC_ADVANCE_RIP();
7425 IEM_MC_END();
7426 break;
7427
7428 case IEMMODE_64BIT:
7429 IEM_MC_BEGIN(0, 2);
7430 IEM_MC_LOCAL(uint64_t, u64Value);
7431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7432
7433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7435 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7436 return IEMOP_RAISE_INVALID_OPCODE();
7437
7438 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7439 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7440 IEM_MC_ADVANCE_RIP();
7441 IEM_MC_END();
7442 break;
7443
7444 case IEMMODE_16BIT:
7445 /** @todo check this form. */
7446 return IEMOP_RAISE_INVALID_OPCODE();
7447 }
7448 }
7449 else
7450 return IEMOP_RAISE_INVALID_OPCODE();
7451 return VINF_SUCCESS;
7452}
7453/* Opcode 0x66 0x0f 0xc3 - invalid */
7454/* Opcode 0xf3 0x0f 0xc3 - invalid */
7455/* Opcode 0xf2 0x0f 0xc3 - invalid */
7456
7457/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7458FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7459/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7460FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7461/* Opcode 0xf3 0x0f 0xc4 - invalid */
7462/* Opcode 0xf2 0x0f 0xc4 - invalid */
7463
7464/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7465FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7466/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7467FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7468/* Opcode 0xf3 0x0f 0xc5 - invalid */
7469/* Opcode 0xf2 0x0f 0xc5 - invalid */
7470
7471/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7472FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7473/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7474FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7475/* Opcode 0xf3 0x0f 0xc6 - invalid */
7476/* Opcode 0xf2 0x0f 0xc6 - invalid */
7477
7478
7479/** Opcode 0x0f 0xc7 !11/1. */
7480FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7481{
7482 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
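    /* Note: cmpxchg8b compares EDX:EAX against the 64-bit memory operand; if
       they are equal, ZF is set and ECX:EBX is written to memory, otherwise
       ZF is cleared and the memory value is loaded into EDX:EAX. */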
7483
7484 IEM_MC_BEGIN(4, 3);
7485 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7486 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7487 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7488 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7489 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7490 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7492
7493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7494 IEMOP_HLP_DONE_DECODING();
7495 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7496
7497 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7498 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7499 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7500
7501 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7502 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7503 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7504
7505 IEM_MC_FETCH_EFLAGS(EFlags);
7506 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7507 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7508 else
7509 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7510
7511 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7512 IEM_MC_COMMIT_EFLAGS(EFlags);
7513 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7514 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7515 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7516 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7517 IEM_MC_ENDIF();
7518 IEM_MC_ADVANCE_RIP();
7519
7520 IEM_MC_END();
7521 return VINF_SUCCESS;
7522}
7523
7524
7525/** Opcode REX.W 0x0f 0xc7 !11/1. */
7526FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7527{
7528 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
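    /* Note: like cmpxchg8b, but on RDX:RAX / RCX:RBX with a 16-byte memory
       operand that must be 16-byte aligned (#GP(0) otherwise, see the check
       below); availability is signalled by the CPUID CMPXCHG16B bit. */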
7529 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7530 {
7531#if 0
7532 RT_NOREF(bRm);
7533 IEMOP_BITCH_ABOUT_STUB();
7534 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7535#else
7536 IEM_MC_BEGIN(4, 3);
7537 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7538 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7539 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7540 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7541 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7542 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7544
7545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7546 IEMOP_HLP_DONE_DECODING();
7547 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7548 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7549
7550 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7551 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7552 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7553
7554 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7555 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7556 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7557
7558 IEM_MC_FETCH_EFLAGS(EFlags);
7559# ifdef RT_ARCH_AMD64
7560 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7561 {
7562 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7563 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7564 else
7565 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7566 }
7567 else
7568# endif
7569 {
7570 /* Note! The fallback for 32-bit systems and systems without CX16 does
7571 multiple accesses that are not at all atomic, which works fine in a
7572 uni-CPU guest configuration (ignoring DMA). If guest SMP is active
7573 we have no choice but to use a rendezvous callback here. Sigh. */
7574 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7575 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7576 else
7577 {
7578 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7579 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7580 }
7581 }
7582
7583 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7584 IEM_MC_COMMIT_EFLAGS(EFlags);
7585 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7586 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7587 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7588 IEM_MC_ENDIF();
7589 IEM_MC_ADVANCE_RIP();
7590
7591 IEM_MC_END();
7592 return VINF_SUCCESS;
7593#endif
7594 }
7595 Log(("cmpxchg16b -> #UD\n"));
7596 return IEMOP_RAISE_INVALID_OPCODE();
7597}
7598
7599FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7600{
7601 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7602 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7603 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7604}
7605
7606/** Opcode 0x0f 0xc7 11/6. */
7607FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7608
7609/** Opcode 0x0f 0xc7 !11/6. */
7610FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7611
7612/** Opcode 0x66 0x0f 0xc7 !11/6. */
7613FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7614
7615/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7616FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7617
7618/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7619FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7620
7621/** Opcode 0x0f 0xc7 11/7. */
7622FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7623
7624
7625/**
7626 * Group 9 jump table for register variant.
7627 */
7628IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7629{ /* pfx: none, 066h, 0f3h, 0f2h */
7630 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7631 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7632 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7633 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7634 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7635 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7636 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7637 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7638};
7639AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7640
7641
7642/**
7643 * Group 9 jump table for memory variant.
7644 */
7645IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7646{ /* pfx: none, 066h, 0f3h, 0f2h */
7647 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7648 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7649 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7650 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7651 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7652 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7653 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7654 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7655};
7656AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7657
7658
7659/** Opcode 0x0f 0xc7. */
7660FNIEMOP_DEF(iemOp_Grp9)
7661{
7662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
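    /* Note: the jump tables above are indexed by /reg * 4 + prefix column
       (0=none, 1=066h, 2=0f3h, 3=0f2h), so e.g. F3 0F C7 /6 with a memory
       operand dispatches to iemOp_Grp9_vmxon_Mq. */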
7663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7664 /* register, register */
7665 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7666 + pVCpu->iem.s.idxPrefix], bRm);
7667 /* memory, register */
7668 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7669 + pVCpu->iem.s.idxPrefix], bRm);
7670}
7671
7672
7673/**
7674 * Common 'bswap register' helper.
7675 */
7676FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7677{
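    /* Note: BSWAP reverses the byte order of the register, e.g. 0x12345678
       becomes 0x78563412; for a 16-bit operand the result is undefined
       according to the manuals. */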
7678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7679 switch (pVCpu->iem.s.enmEffOpSize)
7680 {
7681 case IEMMODE_16BIT:
7682 IEM_MC_BEGIN(1, 0);
7683 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7684 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7685 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7686 IEM_MC_ADVANCE_RIP();
7687 IEM_MC_END();
7688 return VINF_SUCCESS;
7689
7690 case IEMMODE_32BIT:
7691 IEM_MC_BEGIN(1, 0);
7692 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7693 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7694 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7695 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7696 IEM_MC_ADVANCE_RIP();
7697 IEM_MC_END();
7698 return VINF_SUCCESS;
7699
7700 case IEMMODE_64BIT:
7701 IEM_MC_BEGIN(1, 0);
7702 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7703 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7704 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7705 IEM_MC_ADVANCE_RIP();
7706 IEM_MC_END();
7707 return VINF_SUCCESS;
7708
7709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7710 }
7711}
7712
7713
7714/** Opcode 0x0f 0xc8. */
7715FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7716{
7717 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7718 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
7719 REX.X prefix; it appears, however, that REX.B is the correct prefix.
7720 For a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7721 IEMOP_HLP_MIN_486();
7722 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7723}
7724
7725
7726/** Opcode 0x0f 0xc9. */
7727FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7728{
7729 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7730 IEMOP_HLP_MIN_486();
7731 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7732}
7733
7734
7735/** Opcode 0x0f 0xca. */
7736FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7737{
7738 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7739 IEMOP_HLP_MIN_486();
7740 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7741}
7742
7743
7744/** Opcode 0x0f 0xcb. */
7745FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7746{
7747 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7748 IEMOP_HLP_MIN_486();
7749 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7750}
7751
7752
7753/** Opcode 0x0f 0xcc. */
7754FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7755{
7756 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7757 IEMOP_HLP_MIN_486();
7758 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7759}
7760
7761
7762/** Opcode 0x0f 0xcd. */
7763FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7764{
7765 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7766 IEMOP_HLP_MIN_486();
7767 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7768}
7769
7770
7771/** Opcode 0x0f 0xce. */
7772FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7773{
7774 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7775 IEMOP_HLP_MIN_486();
7776 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7777}
7778
7779
7780/** Opcode 0x0f 0xcf. */
7781FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7782{
7783 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7784 IEMOP_HLP_MIN_486();
7785 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7786}
7787
7788
7789/* Opcode 0x0f 0xd0 - invalid */
7790/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7791FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7792/* Opcode 0xf3 0x0f 0xd0 - invalid */
7793/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7794FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7795
7796/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7797FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7798/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7799FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7800/* Opcode 0xf3 0x0f 0xd1 - invalid */
7801/* Opcode 0xf2 0x0f 0xd1 - invalid */
7802
7803/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7804FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7805/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7806FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7807/* Opcode 0xf3 0x0f 0xd2 - invalid */
7808/* Opcode 0xf2 0x0f 0xd2 - invalid */
7809
7810/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7811FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7812/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7813FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7814/* Opcode 0xf3 0x0f 0xd3 - invalid */
7815/* Opcode 0xf2 0x0f 0xd3 - invalid */
7816
7817/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7818FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7819/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7820FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7821/* Opcode 0xf3 0x0f 0xd4 - invalid */
7822/* Opcode 0xf2 0x0f 0xd4 - invalid */
7823
7824/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7825FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7826/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7827FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7828/* Opcode 0xf3 0x0f 0xd5 - invalid */
7829/* Opcode 0xf2 0x0f 0xd5 - invalid */
7830
7831/* Opcode 0x0f 0xd6 - invalid */
7832
7833/**
7834 * @opcode 0xd6
7835 * @oppfx 0x66
7836 * @opcpuid sse2
7837 * @opgroup og_sse2_pcksclr_datamove
7838 * @opxcpttype none
7839 * @optest op1=-1 op2=2 -> op1=2
7840 * @optest op1=0 op2=-42 -> op1=-42
7841 */
7842FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
7843{
7844 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7847 {
7848 /*
7849 * Register, register.
7850 */
7851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7852 IEM_MC_BEGIN(0, 2);
7853 IEM_MC_LOCAL(uint64_t, uSrc);
7854
7855 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7856 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7857
7858 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7859 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7860
7861 IEM_MC_ADVANCE_RIP();
7862 IEM_MC_END();
7863 }
7864 else
7865 {
7866 /*
7867 * Memory, register.
7868 */
7869 IEM_MC_BEGIN(0, 2);
7870 IEM_MC_LOCAL(uint64_t, uSrc);
7871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7872
7873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7876 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7877
7878 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7879 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7880
7881 IEM_MC_ADVANCE_RIP();
7882 IEM_MC_END();
7883 }
7884 return VINF_SUCCESS;
7885}
7886
7887
7888/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7889FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7890/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7891FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7892#if 0
7893FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7894{
7895 /* Docs say register only. */
7896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7897
7898 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7899 {
7900 case IEM_OP_PRF_SIZE_OP: /* SSE */
7901 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7902 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7903 IEM_MC_BEGIN(2, 0);
7904 IEM_MC_ARG(uint64_t *, pDst, 0);
7905 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7907 IEM_MC_PREPARE_SSE_USAGE();
7908 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7909 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7910 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7911 IEM_MC_ADVANCE_RIP();
7912 IEM_MC_END();
7913 return VINF_SUCCESS;
7914
7915 case 0: /* MMX */
7916 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7917 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7918 IEM_MC_BEGIN(2, 0);
7919 IEM_MC_ARG(uint64_t *, pDst, 0);
7920 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7921 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7922 IEM_MC_PREPARE_FPU_USAGE();
7923 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7924 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7925 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7926 IEM_MC_ADVANCE_RIP();
7927 IEM_MC_END();
7928 return VINF_SUCCESS;
7929
7930 default:
7931 return IEMOP_RAISE_INVALID_OPCODE();
7932 }
7933}
7934#endif
7935
7936
7937/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7938FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7939{
7940 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7941 /** @todo testcase: Check that the instruction implicitly clears the high
7942 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7943 * and opcode modifications are made to work with the whole width (not
7944 * just 128 bits). */
7945 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
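    /* Note: pmovmskb gathers the most significant bit of each of the eight
       source bytes into bits 0 thru 7 of the destination GPR. */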
7946 /* Docs say register only. */
7947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7948 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7949 {
7950 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7951 IEM_MC_BEGIN(2, 0);
7952 IEM_MC_ARG(uint64_t *, pDst, 0);
7953 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7954 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7955 IEM_MC_PREPARE_FPU_USAGE();
7956 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7957 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7958 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 return VINF_SUCCESS;
7962 }
7963 return IEMOP_RAISE_INVALID_OPCODE();
7964}
7965
7966/** Opcode 0x66 0x0f 0xd7 - */
7967FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7968{
7969 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7970 /** @todo testcase: Check that the instruction implicitly clears the high
7971 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7972 * and opcode modifications are made to work with the whole width (not
7973 * just 128 bits). */
7974 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7975 /* Docs say register only. */
7976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7978 {
7979 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7980 IEM_MC_BEGIN(2, 0);
7981 IEM_MC_ARG(uint64_t *, pDst, 0);
7982 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7983 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7984 IEM_MC_PREPARE_SSE_USAGE();
7985 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7986 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7987 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7988 IEM_MC_ADVANCE_RIP();
7989 IEM_MC_END();
7990 return VINF_SUCCESS;
7991 }
7992 return IEMOP_RAISE_INVALID_OPCODE();
7993}
7994
7995/* Opcode 0xf3 0x0f 0xd7 - invalid */
7996/* Opcode 0xf2 0x0f 0xd7 - invalid */
7997
7998
7999/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8000FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8001/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
8002FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
8003/* Opcode 0xf3 0x0f 0xd8 - invalid */
8004/* Opcode 0xf2 0x0f 0xd8 - invalid */
8005
8006/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8007FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8008/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
8009FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
8010/* Opcode 0xf3 0x0f 0xd9 - invalid */
8011/* Opcode 0xf2 0x0f 0xd9 - invalid */
8012
8013/** Opcode 0x0f 0xda - pminub Pq, Qq */
8014FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8015/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
8016FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
8017/* Opcode 0xf3 0x0f 0xda - invalid */
8018/* Opcode 0xf2 0x0f 0xda - invalid */
8019
8020/** Opcode 0x0f 0xdb - pand Pq, Qq */
8021FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8022/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
8023FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
8024/* Opcode 0xf3 0x0f 0xdb - invalid */
8025/* Opcode 0xf2 0x0f 0xdb - invalid */
8026
8027/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8028FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8029/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
8030FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
8031/* Opcode 0xf3 0x0f 0xdc - invalid */
8032/* Opcode 0xf2 0x0f 0xdc - invalid */
8033
8034/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8035FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8036/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
8037FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
8038/* Opcode 0xf3 0x0f 0xdd - invalid */
8039/* Opcode 0xf2 0x0f 0xdd - invalid */
8040
8041/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8042FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8043/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
8044FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
8045/* Opcode 0xf3 0x0f 0xde - invalid */
8046/* Opcode 0xf2 0x0f 0xde - invalid */
8047
8048/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8049FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8050/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
8051FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
8052/* Opcode 0xf3 0x0f 0xdf - invalid */
8053/* Opcode 0xf2 0x0f 0xdf - invalid */
8054
8055/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8056FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8057/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
8058FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
8059/* Opcode 0xf3 0x0f 0xe0 - invalid */
8060/* Opcode 0xf2 0x0f 0xe0 - invalid */
8061
8062/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8063FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8064/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
8065FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
8066/* Opcode 0xf3 0x0f 0xe1 - invalid */
8067/* Opcode 0xf2 0x0f 0xe1 - invalid */
8068
8069/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8070FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8071/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
8072FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
8073/* Opcode 0xf3 0x0f 0xe2 - invalid */
8074/* Opcode 0xf2 0x0f 0xe2 - invalid */
8075
8076/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8077FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8078/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
8079FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
8080/* Opcode 0xf3 0x0f 0xe3 - invalid */
8081/* Opcode 0xf2 0x0f 0xe3 - invalid */
8082
8083/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8084FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8085/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
8086FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
8087/* Opcode 0xf3 0x0f 0xe4 - invalid */
8088/* Opcode 0xf2 0x0f 0xe4 - invalid */
8089
8090/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8091FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8092/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
8093FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
8094/* Opcode 0xf3 0x0f 0xe5 - invalid */
8095/* Opcode 0xf2 0x0f 0xe5 - invalid */
8096
8097/* Opcode 0x0f 0xe6 - invalid */
8098/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
8099FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
8100/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
8101FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
8102/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
8103FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
8104
8105
8106/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8107FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8108{
8109 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
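    /* Note: non-temporal 64-bit MMX store; as with movnti the caching hint
       has no functional effect, so a plain store is done. */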
8110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8111 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8112 {
8113 /* Register, memory. */
8114 IEM_MC_BEGIN(0, 2);
8115 IEM_MC_LOCAL(uint64_t, uSrc);
8116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8117
8118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8121 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8122
8123 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8124 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8125
8126 IEM_MC_ADVANCE_RIP();
8127 IEM_MC_END();
8128 return VINF_SUCCESS;
8129 }
8130 /* The register, register encoding is invalid. */
8131 return IEMOP_RAISE_INVALID_OPCODE();
8132}
8133
8134/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
8135FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
8136{
8137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8138 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8139 {
8140 /* Register, memory. */
8141 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
8142 IEM_MC_BEGIN(0, 2);
8143 IEM_MC_LOCAL(RTUINT128U, uSrc);
8144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8145
8146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8148 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8149 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8150
8151 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8152 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8153
8154 IEM_MC_ADVANCE_RIP();
8155 IEM_MC_END();
8156 return VINF_SUCCESS;
8157 }
8158
8159 /* The register, register encoding is invalid. */
8160 return IEMOP_RAISE_INVALID_OPCODE();
8161}
8162
8163/* Opcode 0xf3 0x0f 0xe7 - invalid */
8164/* Opcode 0xf2 0x0f 0xe7 - invalid */
8165
8166
8167/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8168FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8169/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
8170FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
8171/* Opcode 0xf3 0x0f 0xe8 - invalid */
8172/* Opcode 0xf2 0x0f 0xe8 - invalid */
8173
8174/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8175FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8176/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
8177FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
8178/* Opcode 0xf3 0x0f 0xe9 - invalid */
8179/* Opcode 0xf2 0x0f 0xe9 - invalid */
8180
8181/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8182FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8183/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
8184FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
8185/* Opcode 0xf3 0x0f 0xea - invalid */
8186/* Opcode 0xf2 0x0f 0xea - invalid */
8187
8188/** Opcode 0x0f 0xeb - por Pq, Qq */
8189FNIEMOP_STUB(iemOp_por_Pq_Qq);
8190/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
8191FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
8192/* Opcode 0xf3 0x0f 0xeb - invalid */
8193/* Opcode 0xf2 0x0f 0xeb - invalid */
8194
8195/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8196FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8197/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
8198FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
8199/* Opcode 0xf3 0x0f 0xec - invalid */
8200/* Opcode 0xf2 0x0f 0xec - invalid */
8201
8202/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8203FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8204/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
8205FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
8206/* Opcode 0xf3 0x0f 0xed - invalid */
8207/* Opcode 0xf2 0x0f 0xed - invalid */
8208
8209/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8210FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8211/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
8212FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
8213/* Opcode 0xf3 0x0f 0xee - invalid */
8214/* Opcode 0xf2 0x0f 0xee - invalid */
8215
8216
8217/** Opcode 0x0f 0xef - pxor Pq, Qq */
8218FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8219{
8220 IEMOP_MNEMONIC(pxor, "pxor");
8221 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8222}
8223
8224/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
8225FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
8226{
8227 IEMOP_MNEMONIC(vpxor, "vpxor");
8228 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8229}
8230
8231/* Opcode 0xf3 0x0f 0xef - invalid */
8232/* Opcode 0xf2 0x0f 0xef - invalid */
8233
8234/* Opcode 0x0f 0xf0 - invalid */
8235/* Opcode 0x66 0x0f 0xf0 - invalid */
8236/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
8237FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
8238
8239/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8240FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8241/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
8242FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
8243/* Opcode 0xf2 0x0f 0xf1 - invalid */
8244
8245/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8246FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8247/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
8248FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
8249/* Opcode 0xf2 0x0f 0xf2 - invalid */
8250
8251/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8252FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8253/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
8254FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
8255/* Opcode 0xf2 0x0f 0xf3 - invalid */
8256
8257/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8258FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8259/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
8260FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
8261/* Opcode 0xf2 0x0f 0xf4 - invalid */
8262
8263/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8264FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8265/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
8266FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
8267/* Opcode 0xf2 0x0f 0xf5 - invalid */
8268
8269/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8270FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8271/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
8272FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
8273/* Opcode 0xf2 0x0f 0xf6 - invalid */
8274
8275/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8276FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8277/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
8278FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
8279/* Opcode 0xf2 0x0f 0xf7 - invalid */
8280
8281/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8282FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8283/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
8284FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
8285/* Opcode 0xf2 0x0f 0xf8 - invalid */
8286
8287/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8288FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8289/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
8290FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
8291/* Opcode 0xf2 0x0f 0xf9 - invalid */
8292
8293/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8294FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8295/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
8296FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
8297/* Opcode 0xf2 0x0f 0xfa - invalid */
8298
8299/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8300FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8301/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
8302FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
8303/* Opcode 0xf2 0x0f 0xfb - invalid */
8304
8305/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8306FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8307/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
8308FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
8309/* Opcode 0xf2 0x0f 0xfc - invalid */
8310
8311/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8312FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8313/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
8314FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
8315/* Opcode 0xf2 0x0f 0xfd - invalid */
8316
8317/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8318FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8319/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
8320FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
8321/* Opcode 0xf2 0x0f 0xfe - invalid */
8322
8323
8324/** Opcode **** 0x0f 0xff - UD0 */
8325FNIEMOP_DEF(iemOp_ud0)
8326{
8327 IEMOP_MNEMONIC(ud0, "ud0");
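    /* Note: on Intel CPUs UD0 consumes a ModR/M byte and any addressing bytes
       before raising #UD, whereas AMD raises #UD on the bare opcode; that is
       what the vendor check below implements. */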
8328 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8329 {
8330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8331#ifndef TST_IEM_CHECK_MC
8332 RTGCPTR GCPtrEff;
8333 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8334 if (rcStrict != VINF_SUCCESS)
8335 return rcStrict;
8336#endif
8337 IEMOP_HLP_DONE_DECODING();
8338 }
8339 return IEMOP_RAISE_INVALID_OPCODE();
8340}
8341
8342
8343
8344/**
8345 * Two byte opcode map, first byte 0x0f.
8346 *
8347 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8348 * check if it needs updating as well when making changes.
8349 */
8350IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8351{
8352 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8353 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8354 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8355 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8356 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8357 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8358 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8359 /* 0x06 */ IEMOP_X4(iemOp_clts),
8360 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8361 /* 0x08 */ IEMOP_X4(iemOp_invd),
8362 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8363 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8364 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8365 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8366 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8367 /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
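
/*
 * Illustrative sketch only: each opcode byte owns four consecutive slots in
 * the table above, one per mandatory-prefix column (none, 066h, 0f3h, 0f2h),
 * which is why it holds 256 * 4 = 1024 entries and why IEMOP_X4() replicates
 * prefix-insensitive handlers across all four columns.  The helper name below
 * is hypothetical and the use of pVCpu->iem.s.idxPrefix as the column index
 * is an assumption; see the actual decoder for the authoritative dispatch.
 */
#if 0 /* illustration only, not compiled */
FNIEMOP_DEF(iemOp_TwoByteEscape_Sketch)
{
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
    /* Column 0..3 = no prefix, 066h, 0f3h, 0f2h. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif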


/**
 * VEX opcode map \#1.
 *
 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if
 *          it needs updating too when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
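
/*
 * Illustrative sketch only: for VEX-encoded instructions the column is taken
 * straight from the VEX.pp field (bits 1:0 of the final VEX payload byte),
 * which encodes the implied prefix: 0 = none, 1 = 066h, 2 = 0f3h, 3 = 0f2h.
 * The helper name is hypothetical, and real VEX decoding also has to validate
 * VEX.L, VEX.vvvv and the CPU mode; this only shows the map-1 lookup shape.
 */
#if 0 /* illustration only, not compiled */
FNIEMOP_DEF(iemOp_VexMap1Lookup_Sketch)
{
    uint8_t bVexP2;  IEM_OPCODE_GET_NEXT_U8(&bVexP2);   /* final VEX byte: pp = bits 1:0 */
    uint8_t const idxPrefix = bVexP2 & 0x3;             /* 0 = none, 1 = 066h, 2 = 0f3h, 3 = 0f2h */
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + idxPrefix]);
}
#endif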
/** @} */
