VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 66787

Last change on this file since 66787 was 66787, checked in by vboxsync, 8 years ago

IEM: Implemented movhpd Vdq,Mq (66 0f 16).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 315.4 KB
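Nearly every handler in the listing below follows the same decode pattern: fetch the ModRM byte, compare its mod bits against 11b to tell the register form from the memory form, then dispatch on the reg or rm bits. The following is a minimal, self-contained sketch of that field extraction — not part of the file itself; the X86_MODRM_* values are restated here from the standard x86 ModRM layout (mod[7:6], reg[5:3], rm[2:0]), and in 64-bit mode the file additionally ORs in pVCpu->iem.s.uRexReg/uRexB to pick up the REX extension bit.

    #include <stdint.h>
    #include <stdio.h>

    /* Standard x86 ModRM layout: mod[7:6], reg[5:3], rm[2:0]. */
    #define X86_MODRM_MOD_MASK  0xc0
    #define X86_MODRM_MOD_SHIFT 6
    #define X86_MODRM_REG_SHIFT 3
    #define X86_MODRM_REG_SMASK 0x07    /* applied after shifting */
    #define X86_MODRM_RM_MASK   0x07

    int main(void)
    {
        uint8_t  bRm      = 0xd8; /* mod=11b, reg=3, rm=0, e.g. 0f 01 d8 (vmrun) */
        int      fRegForm = (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);
        unsigned iReg     = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
        unsigned iRm      = bRm & X86_MODRM_RM_MASK;
        printf("register form: %d, reg: %u, rm: %u\n", fRegForm, iReg, iRm);
        return 0;
    }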
 
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66787 2017-05-04 11:55:53Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

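/* iemOp_Grp6 below indexes the table above by the ModRM reg field
   (bits 5:3), so entries 0..7 correspond to the /0../7 encodings. */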
/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
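                /* On 386 and older targets the unimplemented MSW bits are
                   forced to 1: 0xffe0 on the 386, 0xfff0 on the 286. */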
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

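    /* Register forms: the rm field selects the individual instruction. */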
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

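        /* puDst and puSrc reference the guest XMM registers directly, so the
           assembly helper updates the destination register in place. */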
        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT

/**
 * @opdone
 * @opmnemonic udf20f16
1930 * @opcode 0x16
1931 * @oppfx 0xf2
1932 * @opunused intel-modrm
1933 * @opcpuid sse
1934 * @optest ->
1935 * @opdone
1936 */
1937
/** Opcode 0x0f 0x17 - movhps Mq, Vq */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq);  //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpd Mq, Vq */
FNIEMOP_STUB(iemOp_movhpd_Mq_Vq);  //NEXT
1942
1943/**
1944 * @opdone
1945 * @opmnemonic udf30f17
1946 * @opcode 0x17
1947 * @oppfx 0xf3
1948 * @opunused intel-modrm
1949 * @opcpuid sse
1950 * @optest ->
1951 * @opdone
1952 */
1953
1954/**
1955 * @opmnemonic udf20f17
1956 * @opcode 0x17
1957 * @oppfx 0xf2
1958 * @opunused intel-modrm
1959 * @opcpuid sse
1960 * @optest ->
1961 * @opdone
1962 */
1963
1964
1965/** Opcode 0x0f 0x18. */
1966FNIEMOP_DEF(iemOp_prefetch_Grp16)
1967{
1968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1969 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1970 {
1971 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1972 {
1973 case 4: /* Aliased to /0 for the time being according to AMD. */
1974 case 5: /* Aliased to /0 for the time being according to AMD. */
1975 case 6: /* Aliased to /0 for the time being according to AMD. */
1976 case 7: /* Aliased to /0 for the time being according to AMD. */
1977 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1978 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1979 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1980 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1982 }
1983
1984 IEM_MC_BEGIN(0, 1);
1985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1988 /* Currently a NOP. */
1989 NOREF(GCPtrEffSrc);
1990 IEM_MC_ADVANCE_RIP();
1991 IEM_MC_END();
1992 return VINF_SUCCESS;
1993 }
1994
1995 return IEMOP_RAISE_INVALID_OPCODE();
1996}
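
/* Illustrative note: the prefetch hint is selected by the ModRM reg field,
   e.g. "0f 18 /1" encodes prefetchT0 m8 (see the switch above), and since
   prefetches are only hints, emulating them as NOPs is architecturally
   valid. */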
1997
1998
1999/** Opcode 0x0f 0x19..0x1f. */
2000FNIEMOP_DEF(iemOp_nop_Ev)
2001{
2002 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2005 {
2006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2007 IEM_MC_BEGIN(0, 0);
2008 IEM_MC_ADVANCE_RIP();
2009 IEM_MC_END();
2010 }
2011 else
2012 {
2013 IEM_MC_BEGIN(0, 1);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2017 /* Currently a NOP. */
2018 NOREF(GCPtrEffSrc);
2019 IEM_MC_ADVANCE_RIP();
2020 IEM_MC_END();
2021 }
2022 return VINF_SUCCESS;
2023}
2024
2025
2026/** Opcode 0x0f 0x20. */
2027FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2028{
    /* The mod field is ignored, as are operand-size overrides. */
2030 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2031 IEMOP_HLP_MIN_386();
2032 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2033 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2034 else
2035 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2036
2037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2038 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2039 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2040 {
2041 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
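        /* (Assumed example: on such CPUs "lock mov eax, cr0" executed in
           32-bit code accesses CR8 instead of CR0, the LOCK prefix acting as
           bit 3 of the control register index, matching the |= 8 below.) */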
2042 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2043 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2044 iCrReg |= 8;
2045 }
2046 switch (iCrReg)
2047 {
2048 case 0: case 2: case 3: case 4: case 8:
2049 break;
2050 default:
2051 return IEMOP_RAISE_INVALID_OPCODE();
2052 }
2053 IEMOP_HLP_DONE_DECODING();
2054
2055 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2056}
2057
2058
2059/** Opcode 0x0f 0x21. */
2060FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2061{
2062 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2063 IEMOP_HLP_MIN_386();
2064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2067 return IEMOP_RAISE_INVALID_OPCODE();
2068 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2069 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2070 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2071}
2072
2073
2074/** Opcode 0x0f 0x22. */
2075FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2076{
    /* The mod field is ignored, as are operand-size overrides. */
2078 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2079 IEMOP_HLP_MIN_386();
2080 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2081 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2082 else
2083 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2084
2085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2086 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2087 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2088 {
2089 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2090 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2091 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2092 iCrReg |= 8;
2093 }
2094 switch (iCrReg)
2095 {
2096 case 0: case 2: case 3: case 4: case 8:
2097 break;
2098 default:
2099 return IEMOP_RAISE_INVALID_OPCODE();
2100 }
2101 IEMOP_HLP_DONE_DECODING();
2102
2103 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2104}
2105
2106
2107/** Opcode 0x0f 0x23. */
2108FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2109{
2110 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2111 IEMOP_HLP_MIN_386();
2112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2114 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2115 return IEMOP_RAISE_INVALID_OPCODE();
2116 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2117 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2118 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2119}
2120
2121
2122/** Opcode 0x0f 0x24. */
2123FNIEMOP_DEF(iemOp_mov_Rd_Td)
2124{
2125 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2126 /** @todo works on 386 and 486. */
2127 /* The RM byte is not considered, see testcase. */
2128 return IEMOP_RAISE_INVALID_OPCODE();
2129}
2130
2131
2132/** Opcode 0x0f 0x26. */
2133FNIEMOP_DEF(iemOp_mov_Td_Rd)
2134{
2135 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2136 /** @todo works on 386 and 486. */
2137 /* The RM byte is not considered, see testcase. */
2138 return IEMOP_RAISE_INVALID_OPCODE();
2139}
2140
2141
2142/** Opcode 0x0f 0x28 - movaps Vps, Wps */
2143FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2144{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
2146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2148 {
2149 /*
2150 * Register, register.
2151 */
2152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2153 IEM_MC_BEGIN(0, 0);
2154 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2156 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2157 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2158 IEM_MC_ADVANCE_RIP();
2159 IEM_MC_END();
2160 }
2161 else
2162 {
2163 /*
2164 * Register, memory.
2165 */
2166 IEM_MC_BEGIN(0, 2);
2167 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2169
2170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2172 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2173 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2174
2175 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2176 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2177
2178 IEM_MC_ADVANCE_RIP();
2179 IEM_MC_END();
2180 }
2181 return VINF_SUCCESS;
2182}
2183
2184/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
2185FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2186{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
2188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2189 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2190 {
2191 /*
2192 * Register, register.
2193 */
2194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2195 IEM_MC_BEGIN(0, 0);
2196 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2197 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2198 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2199 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2200 IEM_MC_ADVANCE_RIP();
2201 IEM_MC_END();
2202 }
2203 else
2204 {
2205 /*
2206 * Register, memory.
2207 */
2208 IEM_MC_BEGIN(0, 2);
2209 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2211
2212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2215 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2216
2217 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2218 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2219
2220 IEM_MC_ADVANCE_RIP();
2221 IEM_MC_END();
2222 }
2223 return VINF_SUCCESS;
2224}
2225
2226/* Opcode 0xf3 0x0f 0x28 - invalid */
2227/* Opcode 0xf2 0x0f 0x28 - invalid */
2228
2229/** Opcode 0x0f 0x29 - movaps Wps, Vps */
2230FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2231{
2232 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2235 {
2236 /*
2237 * Register, register.
2238 */
2239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2240 IEM_MC_BEGIN(0, 0);
2241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2243 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2244 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2245 IEM_MC_ADVANCE_RIP();
2246 IEM_MC_END();
2247 }
2248 else
2249 {
2250 /*
2251 * Memory, register.
2252 */
2253 IEM_MC_BEGIN(0, 2);
2254 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2256
2257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2259 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2260 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2261
2262 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2263 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2264
2265 IEM_MC_ADVANCE_RIP();
2266 IEM_MC_END();
2267 }
2268 return VINF_SUCCESS;
2269}
2270
2271/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
2272FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2273{
2274 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2276 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2277 {
2278 /*
2279 * Register, register.
2280 */
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_BEGIN(0, 0);
2283 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2284 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2285 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2286 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2287 IEM_MC_ADVANCE_RIP();
2288 IEM_MC_END();
2289 }
2290 else
2291 {
2292 /*
2293 * Memory, register.
2294 */
2295 IEM_MC_BEGIN(0, 2);
2296 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2298
2299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2301 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2302 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2303
2304 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2305 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2306
2307 IEM_MC_ADVANCE_RIP();
2308 IEM_MC_END();
2309 }
2310 return VINF_SUCCESS;
2311}
2312
2313/* Opcode 0xf3 0x0f 0x29 - invalid */
2314/* Opcode 0xf2 0x0f 0x29 - invalid */
2315
2316
2317/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2318FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2319/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2320FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2325
2326
/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2328FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2329{
2330 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2332 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2333 {
        /*
         * Memory, register.
         */
2337 IEM_MC_BEGIN(0, 2);
2338 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2340
2341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2343 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2344 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2345
2346 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2347 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2348
2349 IEM_MC_ADVANCE_RIP();
2350 IEM_MC_END();
2351 }
2352 /* The register, register encoding is invalid. */
2353 else
2354 return IEMOP_RAISE_INVALID_OPCODE();
2355 return VINF_SUCCESS;
2356}
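
/* Illustrative note: the non-temporal hint of movntps is not modelled; the
   sequence above performs a plain 128-bit aligned SSE store, e.g.

       movntps [mem128], xmm1   ; emulated as an ordinary aligned store

   which is architecturally fine since the hint only affects cache usage. */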
2357
2358/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2359FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2360{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2363 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2364 {
        /*
         * Memory, register.
         */
2368 IEM_MC_BEGIN(0, 2);
2369 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2371
2372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2375 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2376
2377 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2378 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2379
2380 IEM_MC_ADVANCE_RIP();
2381 IEM_MC_END();
2382 }
2383 /* The register, register encoding is invalid. */
2384 else
2385 return IEMOP_RAISE_INVALID_OPCODE();
2386 return VINF_SUCCESS;
2387}
2388/* Opcode 0xf3 0x0f 0x2b - invalid */
2389/* Opcode 0xf2 0x0f 0x2b - invalid */
2390
2391
2392/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2393FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2394/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2395FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2396/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2397FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2398/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2399FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2400
2401/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2402FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2403/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2404FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2405/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2406FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2407/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2408FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2409
2410/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2411FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2412/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2413FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2414/* Opcode 0xf3 0x0f 0x2e - invalid */
2415/* Opcode 0xf2 0x0f 0x2e - invalid */
2416
2417/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2418FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2419/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2420FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2421/* Opcode 0xf3 0x0f 0x2f - invalid */
2422/* Opcode 0xf2 0x0f 0x2f - invalid */
2423
2424/** Opcode 0x0f 0x30. */
2425FNIEMOP_DEF(iemOp_wrmsr)
2426{
2427 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2429 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2430}
2431
2432
2433/** Opcode 0x0f 0x31. */
2434FNIEMOP_DEF(iemOp_rdtsc)
2435{
2436 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2438 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2439}
2440
2441
/** Opcode 0x0f 0x32. */
2443FNIEMOP_DEF(iemOp_rdmsr)
2444{
2445 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2447 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2448}
2449
2450
/** Opcode 0x0f 0x33. */
2452FNIEMOP_DEF(iemOp_rdpmc)
2453{
2454 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2456 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2457}
2458
2459
2460/** Opcode 0x0f 0x34. */
2461FNIEMOP_STUB(iemOp_sysenter);
2462/** Opcode 0x0f 0x35. */
2463FNIEMOP_STUB(iemOp_sysexit);
2464/** Opcode 0x0f 0x37. */
2465FNIEMOP_STUB(iemOp_getsec);
2466
2467
2468/** Opcode 0x0f 0x38. */
2469FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2470{
2471#ifdef IEM_WITH_THREE_0F_38
2472 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2473 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2474#else
2475 IEMOP_BITCH_ABOUT_STUB();
2476 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2477#endif
2478}
2479
2480
2481/** Opcode 0x0f 0x3a. */
2482FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2483{
2484#ifdef IEM_WITH_THREE_0F_3A
2485 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2486 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2487#else
2488 IEMOP_BITCH_ABOUT_STUB();
2489 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2490#endif
2491}
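
/* Sketch of the assumed three-byte table layout (illustrative only): four
   consecutive entries per opcode byte, selected by the last SIMD prefix as
   recorded in idxPrefix (assumed order: 0=none, 1=0x66, 2=0xf3, 3=0xf2):

       idx = (uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix;

   so e.g. "66 0f 38 00" would dispatch via entry 0x00 * 4 + 1. */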
2492
2493
2494/**
2495 * Implements a conditional move.
2496 *
 * Wish there were an obvious way to do this where we could share and reduce
2498 * code bloat.
2499 *
2500 * @param a_Cnd The conditional "microcode" operation.
2501 */
2502#define CMOV_X(a_Cnd) \
2503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2505 { \
2506 switch (pVCpu->iem.s.enmEffOpSize) \
2507 { \
2508 case IEMMODE_16BIT: \
2509 IEM_MC_BEGIN(0, 1); \
2510 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2511 a_Cnd { \
2512 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2513 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2514 } IEM_MC_ENDIF(); \
2515 IEM_MC_ADVANCE_RIP(); \
2516 IEM_MC_END(); \
2517 return VINF_SUCCESS; \
2518 \
2519 case IEMMODE_32BIT: \
2520 IEM_MC_BEGIN(0, 1); \
2521 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2522 a_Cnd { \
2523 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2524 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2525 } IEM_MC_ELSE() { \
2526 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2527 } IEM_MC_ENDIF(); \
2528 IEM_MC_ADVANCE_RIP(); \
2529 IEM_MC_END(); \
2530 return VINF_SUCCESS; \
2531 \
2532 case IEMMODE_64BIT: \
2533 IEM_MC_BEGIN(0, 1); \
2534 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2535 a_Cnd { \
2536 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2537 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2538 } IEM_MC_ENDIF(); \
2539 IEM_MC_ADVANCE_RIP(); \
2540 IEM_MC_END(); \
2541 return VINF_SUCCESS; \
2542 \
2543 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2544 } \
2545 } \
2546 else \
2547 { \
2548 switch (pVCpu->iem.s.enmEffOpSize) \
2549 { \
2550 case IEMMODE_16BIT: \
2551 IEM_MC_BEGIN(0, 2); \
2552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2553 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2555 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2556 a_Cnd { \
2557 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2558 } IEM_MC_ENDIF(); \
2559 IEM_MC_ADVANCE_RIP(); \
2560 IEM_MC_END(); \
2561 return VINF_SUCCESS; \
2562 \
2563 case IEMMODE_32BIT: \
2564 IEM_MC_BEGIN(0, 2); \
2565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2566 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2568 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2569 a_Cnd { \
2570 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2571 } IEM_MC_ELSE() { \
2572 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2573 } IEM_MC_ENDIF(); \
2574 IEM_MC_ADVANCE_RIP(); \
2575 IEM_MC_END(); \
2576 return VINF_SUCCESS; \
2577 \
2578 case IEMMODE_64BIT: \
2579 IEM_MC_BEGIN(0, 2); \
2580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2581 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2583 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2584 a_Cnd { \
2585 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2586 } IEM_MC_ENDIF(); \
2587 IEM_MC_ADVANCE_RIP(); \
2588 IEM_MC_END(); \
2589 return VINF_SUCCESS; \
2590 \
2591 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2592 } \
2593 } do {} while (0)
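
/* Illustrative usage (see the opcodes right below): an instruction handler
   instantiates the template with its EFLAGS test, e.g.

       CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));

   which expands to the full register/memory decode with the move guarded by
   the "if OF set" microcode condition. */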
2594
2595
2596
2597/** Opcode 0x0f 0x40. */
2598FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2599{
2600 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2601 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2602}
2603
2604
2605/** Opcode 0x0f 0x41. */
2606FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2607{
2608 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2609 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2610}
2611
2612
2613/** Opcode 0x0f 0x42. */
2614FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2615{
2616 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2617 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2618}
2619
2620
2621/** Opcode 0x0f 0x43. */
2622FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2623{
2624 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2625 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2626}
2627
2628
2629/** Opcode 0x0f 0x44. */
2630FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2631{
2632 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2633 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2634}
2635
2636
2637/** Opcode 0x0f 0x45. */
2638FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2639{
2640 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2641 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2642}
2643
2644
2645/** Opcode 0x0f 0x46. */
2646FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2647{
2648 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2649 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2650}
2651
2652
2653/** Opcode 0x0f 0x47. */
2654FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2655{
2656 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2657 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2658}
2659
2660
2661/** Opcode 0x0f 0x48. */
2662FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2663{
2664 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2665 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2666}
2667
2668
2669/** Opcode 0x0f 0x49. */
2670FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2671{
2672 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2673 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2674}
2675
2676
2677/** Opcode 0x0f 0x4a. */
2678FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2679{
2680 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2681 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2682}
2683
2684
2685/** Opcode 0x0f 0x4b. */
2686FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2687{
2688 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2689 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2690}
2691
2692
2693/** Opcode 0x0f 0x4c. */
2694FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2695{
2696 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2697 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2698}
2699
2700
2701/** Opcode 0x0f 0x4d. */
2702FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2703{
2704 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2705 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2706}
2707
2708
2709/** Opcode 0x0f 0x4e. */
2710FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2711{
2712 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2713 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2714}
2715
2716
2717/** Opcode 0x0f 0x4f. */
2718FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2719{
2720 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2721 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2722}
2723
2724#undef CMOV_X
2725
2726/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2727FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2728/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2729FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2730/* Opcode 0xf3 0x0f 0x50 - invalid */
2731/* Opcode 0xf2 0x0f 0x50 - invalid */
2732
2733/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2734FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2735/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2736FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2737/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2738FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2739/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2740FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2741
2742/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2743FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2744/* Opcode 0x66 0x0f 0x52 - invalid */
2745/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2746FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2747/* Opcode 0xf2 0x0f 0x52 - invalid */
2748
2749/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2750FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2751/* Opcode 0x66 0x0f 0x53 - invalid */
2752/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2753FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2754/* Opcode 0xf2 0x0f 0x53 - invalid */
2755
2756/** Opcode 0x0f 0x54 - andps Vps, Wps */
2757FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2758/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2759FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2760/* Opcode 0xf3 0x0f 0x54 - invalid */
2761/* Opcode 0xf2 0x0f 0x54 - invalid */
2762
2763/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2764FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2765/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2766FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2767/* Opcode 0xf3 0x0f 0x55 - invalid */
2768/* Opcode 0xf2 0x0f 0x55 - invalid */
2769
2770/** Opcode 0x0f 0x56 - orps Vps, Wps */
2771FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2772/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2773FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2774/* Opcode 0xf3 0x0f 0x56 - invalid */
2775/* Opcode 0xf2 0x0f 0x56 - invalid */
2776
2777/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2778FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2779/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2780FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2781/* Opcode 0xf3 0x0f 0x57 - invalid */
2782/* Opcode 0xf2 0x0f 0x57 - invalid */
2783
2784/** Opcode 0x0f 0x58 - addps Vps, Wps */
2785FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2786/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2787FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2788/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2789FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2790/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2791FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2792
2793/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2794FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2795/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2796FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2797/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2798FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2799/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2800FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2801
2802/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2803FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2804/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2805FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2806/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2807FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2808/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2809FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2810
2811/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2812FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2813/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2814FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2815/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2816FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2817/* Opcode 0xf2 0x0f 0x5b - invalid */
2818
2819/** Opcode 0x0f 0x5c - subps Vps, Wps */
2820FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2821/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2822FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2823/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2824FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2825/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2826FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2827
2828/** Opcode 0x0f 0x5d - minps Vps, Wps */
2829FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2830/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2831FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2832/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2833FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2834/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2835FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2836
2837/** Opcode 0x0f 0x5e - divps Vps, Wps */
2838FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2839/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2840FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2841/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2842FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2843/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2844FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2845
2846/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2847FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2848/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2849FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2850/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2851FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2852/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2853FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2854
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which only the lower 64 bits are used.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2866{
2867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2869 {
2870 /*
2871 * Register, register.
2872 */
2873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2874 IEM_MC_BEGIN(2, 0);
2875 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2876 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2877 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2878 IEM_MC_PREPARE_SSE_USAGE();
2879 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2880 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2881 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2882 IEM_MC_ADVANCE_RIP();
2883 IEM_MC_END();
2884 }
2885 else
2886 {
2887 /*
2888 * Register, memory.
2889 */
2890 IEM_MC_BEGIN(2, 2);
2891 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2892 IEM_MC_LOCAL(uint64_t, uSrc);
2893 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2895
2896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2898 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2899 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2900
2901 IEM_MC_PREPARE_SSE_USAGE();
2902 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2903 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2904
2905 IEM_MC_ADVANCE_RIP();
2906 IEM_MC_END();
2907 }
2908 return VINF_SUCCESS;
2909}
2910
2911
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2923{
2924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2925 if (!pImpl->pfnU64)
2926 return IEMOP_RAISE_INVALID_OPCODE();
2927 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2928 {
2929 /*
2930 * Register, register.
2931 */
2932 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2933 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935 IEM_MC_BEGIN(2, 0);
2936 IEM_MC_ARG(uint64_t *, pDst, 0);
2937 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2938 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2939 IEM_MC_PREPARE_FPU_USAGE();
2940 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2941 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2942 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2943 IEM_MC_ADVANCE_RIP();
2944 IEM_MC_END();
2945 }
2946 else
2947 {
2948 /*
2949 * Register, memory.
2950 */
2951 IEM_MC_BEGIN(2, 2);
2952 IEM_MC_ARG(uint64_t *, pDst, 0);
2953 IEM_MC_LOCAL(uint32_t, uSrc);
2954 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2956
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2959 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2960 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2961
2962 IEM_MC_PREPARE_FPU_USAGE();
2963 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2964 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2965
2966 IEM_MC_ADVANCE_RIP();
2967 IEM_MC_END();
2968 }
2969 return VINF_SUCCESS;
2970}
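
/* Worked example (illustrative only) of a low-low unpack handled by the two
   workers above: punpcklbw with dst bytes a15..a0 and src bytes b15..b0
   interleaves the low eight bytes of each operand:

       dst = b7 a7 b6 a6 b5 a5 b4 a4 b3 a3 b2 a2 b1 a1 b0 a0   (high..low)

   so only the low half of each source contributes, yet the result fills the
   whole destination; the MMX form does the same with the low four bytes. */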
2971
2972
2973/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2974FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2975{
2976 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2977 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2978}
2979
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2984 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2985}
2986
2987/* Opcode 0xf3 0x0f 0x60 - invalid */
2988
2989
2990/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2991FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2992{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires MMX CPUID. */
2994 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2995}
2996
2997/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2998FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2999{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3001 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3002}
3003
3004/* Opcode 0xf3 0x0f 0x61 - invalid */
3005
3006
3007/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3008FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3009{
3010 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3011 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3012}
3013
3014/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3015FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3016{
3017 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3018 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3019}
3020
3021/* Opcode 0xf3 0x0f 0x62 - invalid */
3022
3023
3024
3025/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3026FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3027/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3028FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3029/* Opcode 0xf3 0x0f 0x63 - invalid */
3030
3031/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3032FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3033/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3034FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3035/* Opcode 0xf3 0x0f 0x64 - invalid */
3036
3037/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3038FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3039/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3040FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3041/* Opcode 0xf3 0x0f 0x65 - invalid */
3042
3043/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3044FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3045/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3046FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3047/* Opcode 0xf3 0x0f 0x66 - invalid */
3048
3049/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3050FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
3052FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3053/* Opcode 0xf3 0x0f 0x67 - invalid */
3054
3055
3056/**
3057 * Common worker for MMX instructions on the form:
3058 * pxxxx mm1, mm2/mem64
3059 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access.
3063 *
3064 * Exceptions type 4.
3065 */
3066FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3067{
3068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3069 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3070 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3071 {
3072 /*
3073 * Register, register.
3074 */
3075 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3076 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3078 IEM_MC_BEGIN(2, 0);
3079 IEM_MC_ARG(uint64_t *, pDst, 0);
3080 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3081 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3082 IEM_MC_PREPARE_FPU_USAGE();
3083 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3084 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3085 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3086 IEM_MC_ADVANCE_RIP();
3087 IEM_MC_END();
3088 }
3089 else
3090 {
3091 /*
3092 * Register, memory.
3093 */
3094 IEM_MC_BEGIN(2, 2);
3095 IEM_MC_ARG(uint64_t *, pDst, 0);
3096 IEM_MC_LOCAL(uint64_t, uSrc);
3097 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3099
3100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3102 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3103 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3104
3105 IEM_MC_PREPARE_FPU_USAGE();
3106 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3107 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3108
3109 IEM_MC_ADVANCE_RIP();
3110 IEM_MC_END();
3111 }
3112 return VINF_SUCCESS;
3113}
3114
3115
3116/**
3117 * Common worker for SSE2 instructions on the form:
3118 * pxxxx xmm1, xmm2/mem128
3119 *
 * The 2nd operand is the second half of a register, which in the memory case
 * is a 128-bit aligned access where the implementation may read the full
 * 128 bits or only the upper 64 bits.
3123 *
3124 * Exceptions type 4.
3125 */
3126FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3127{
3128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3129 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3130 {
3131 /*
3132 * Register, register.
3133 */
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 IEM_MC_BEGIN(2, 0);
3136 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3137 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3139 IEM_MC_PREPARE_SSE_USAGE();
3140 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3141 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3142 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3143 IEM_MC_ADVANCE_RIP();
3144 IEM_MC_END();
3145 }
3146 else
3147 {
3148 /*
3149 * Register, memory.
3150 */
3151 IEM_MC_BEGIN(2, 2);
3152 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3153 IEM_MC_LOCAL(RTUINT128U, uSrc);
3154 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3156
3157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3159 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3161
3162 IEM_MC_PREPARE_SSE_USAGE();
3163 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3164 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3165
3166 IEM_MC_ADVANCE_RIP();
3167 IEM_MC_END();
3168 }
3169 return VINF_SUCCESS;
3170}
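
/* Worked example (illustrative only) of a high-high unpack: punpckhbw
   interleaves the upper bytes instead,

       dst = b15 a15 b14 a14 ... b8 a8   (high..low)

   which is why the memory fetch above only really needs the upper qword even
   though a full 128-bit aligned read is performed. */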
3171
3172
3173/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3174FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3175{
3176 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3177 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3178}
3179
3180/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3181FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3182{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3184 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3185}
3186/* Opcode 0xf3 0x0f 0x68 - invalid */
3187
3188
3189/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3190FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3191{
3192 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3193 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3194}
3195
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3197FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3198{
3199 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
3203/* Opcode 0xf3 0x0f 0x69 - invalid */
3204
3205
3206/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3207FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3208{
3209 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3210 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3211}
3212
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3217 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3218}
3219/* Opcode 0xf3 0x0f 0x6a - invalid */
3220
3221
3222/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3223FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3224/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3225FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3226/* Opcode 0xf3 0x0f 0x6b - invalid */
3227
3228
3229/* Opcode 0x0f 0x6c - invalid */
3230
3231/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3232FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3233{
3234 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3235 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3236}
3237
3238/* Opcode 0xf3 0x0f 0x6c - invalid */
3239/* Opcode 0xf2 0x0f 0x6c - invalid */
3240
3241
3242/* Opcode 0x0f 0x6d - invalid */
3243
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,Wx");
3248 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3249}
3250
3251/* Opcode 0xf3 0x0f 0x6d - invalid */
3252
3253
3254/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3255FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3256{
3257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3258 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3259 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3260 else
3261 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3263 {
3264 /* MMX, greg */
3265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3266 IEM_MC_BEGIN(0, 1);
3267 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3268 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3269 IEM_MC_LOCAL(uint64_t, u64Tmp);
3270 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3271 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3272 else
3273 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3274 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3275 IEM_MC_ADVANCE_RIP();
3276 IEM_MC_END();
3277 }
3278 else
3279 {
3280 /* MMX, [mem] */
3281 IEM_MC_BEGIN(0, 2);
3282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3283 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* No immediate follows the ModRM encoding. */
3285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3286 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3287 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3288 {
3289 IEM_MC_LOCAL(uint64_t, u64Tmp);
3290 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3291 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3292 }
3293 else
3294 {
3295 IEM_MC_LOCAL(uint32_t, u32Tmp);
3296 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3297 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3298 }
3299 IEM_MC_ADVANCE_RIP();
3300 IEM_MC_END();
3301 }
3302 return VINF_SUCCESS;
3303}
3304
3305/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3306FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3307{
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Vq,Eq");
3311 else
        IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Vd,Ed");
3313 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3314 {
3315 /* XMM, greg*/
3316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3317 IEM_MC_BEGIN(0, 1);
3318 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3319 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3320 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3321 {
3322 IEM_MC_LOCAL(uint64_t, u64Tmp);
3323 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3324 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3325 }
3326 else
3327 {
3328 IEM_MC_LOCAL(uint32_t, u32Tmp);
3329 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3330 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3331 }
3332 IEM_MC_ADVANCE_RIP();
3333 IEM_MC_END();
3334 }
3335 else
3336 {
3337 /* XMM, [mem] */
3338 IEM_MC_BEGIN(0, 2);
3339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3340 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* No immediate follows the ModRM encoding. */
3342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3343 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3344 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3345 {
3346 IEM_MC_LOCAL(uint64_t, u64Tmp);
3347 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3348 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3349 }
3350 else
3351 {
3352 IEM_MC_LOCAL(uint32_t, u32Tmp);
3353 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3354 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3355 }
3356 IEM_MC_ADVANCE_RIP();
3357 IEM_MC_END();
3358 }
3359 return VINF_SUCCESS;
3360}
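
/* Illustrative sketch (hypothetical names): the 66-prefixed form above always
   zero-extends the source to the full 128 bits, roughly:

       puDst->au64[0] = fRexW ? uSrc64 : (uint64_t)uSrc32;
       puDst->au64[1] = 0;

   while the unprefixed MMX form zero-extends a 32-bit source to 64 bits
   within the MMX register. */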
3361
3362/* Opcode 0xf3 0x0f 0x6e - invalid */
3363
3364
3365/** Opcode 0x0f 0x6f - movq Pq, Qq */
3366FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3367{
3368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3369 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3370 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3371 {
3372 /*
3373 * Register, register.
3374 */
3375 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3376 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3378 IEM_MC_BEGIN(0, 1);
3379 IEM_MC_LOCAL(uint64_t, u64Tmp);
3380 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3381 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3382 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3383 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3384 IEM_MC_ADVANCE_RIP();
3385 IEM_MC_END();
3386 }
3387 else
3388 {
3389 /*
3390 * Register, memory.
3391 */
3392 IEM_MC_BEGIN(0, 2);
3393 IEM_MC_LOCAL(uint64_t, u64Tmp);
3394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3395
3396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3398 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3399 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3400 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3401 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3402
3403 IEM_MC_ADVANCE_RIP();
3404 IEM_MC_END();
3405 }
3406 return VINF_SUCCESS;
3407}
3408
3409/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3410FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3411{
3412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3413 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3414 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3415 {
3416 /*
3417 * Register, register.
3418 */
3419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3420 IEM_MC_BEGIN(0, 0);
3421 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3422 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3423 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3424 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3425 IEM_MC_ADVANCE_RIP();
3426 IEM_MC_END();
3427 }
3428 else
3429 {
3430 /*
3431 * Register, memory.
3432 */
3433 IEM_MC_BEGIN(0, 2);
3434 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3436
3437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3439 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3440 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3441 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3442 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3443
3444 IEM_MC_ADVANCE_RIP();
3445 IEM_MC_END();
3446 }
3447 return VINF_SUCCESS;
3448}
3449
3450/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3451FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3452{
3453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3454 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3456 {
3457 /*
3458 * Register, register.
3459 */
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3461 IEM_MC_BEGIN(0, 0);
3462 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3464 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3465 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3466 IEM_MC_ADVANCE_RIP();
3467 IEM_MC_END();
3468 }
3469 else
3470 {
3471 /*
3472 * Register, memory.
3473 */
3474 IEM_MC_BEGIN(0, 2);
3475 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3477
3478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3480 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3481 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3482 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3483 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3484
3485 IEM_MC_ADVANCE_RIP();
3486 IEM_MC_END();
3487 }
3488 return VINF_SUCCESS;
3489}
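
/* Illustrative difference between the two forms above: movdqa fetches with
   IEM_MC_FETCH_MEM_U128_ALIGN_SSE and thus faults on misaligned operands,
   whereas movdqu uses the plain IEM_MC_FETCH_MEM_U128 and accepts any
   alignment. */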
3490
3491
3492/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3493FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3494{
3495 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3498 {
3499 /*
3500 * Register, register.
3501 */
3502 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3504
3505 IEM_MC_BEGIN(3, 0);
3506 IEM_MC_ARG(uint64_t *, pDst, 0);
3507 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3508 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3509 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3510 IEM_MC_PREPARE_FPU_USAGE();
3511 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3512 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3513 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3514 IEM_MC_ADVANCE_RIP();
3515 IEM_MC_END();
3516 }
3517 else
3518 {
3519 /*
3520 * Register, memory.
3521 */
3522 IEM_MC_BEGIN(3, 2);
3523 IEM_MC_ARG(uint64_t *, pDst, 0);
3524 IEM_MC_LOCAL(uint64_t, uSrc);
3525 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3527
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 (bEvil) follows the ModRM encoding. */
3529 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3530 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3532 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3533
3534 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3535 IEM_MC_PREPARE_FPU_USAGE();
3536 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3537 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3538
3539 IEM_MC_ADVANCE_RIP();
3540 IEM_MC_END();
3541 }
3542 return VINF_SUCCESS;
3543}
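
/* How the pshuf* immediate works (illustrative sketch, hypothetical uDst and
   uSrc): each 2-bit field of bEvil selects one of the four source words
   (pshufw) or dwords (pshufd):

       for (unsigned i = 0; i < 4; i++)
           uDst.au16[i] = uSrc.au16[(bEvil >> (i * 2)) & 3];

   pshufhw/pshuflw apply the same scheme to only the high/low four words and
   copy the other half through unchanged. */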
3544
3545/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3546FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3547{
3548 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3550 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3551 {
3552 /*
3553 * Register, register.
3554 */
3555 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3557
3558 IEM_MC_BEGIN(3, 0);
3559 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3560 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3561 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3562 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3563 IEM_MC_PREPARE_SSE_USAGE();
3564 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3565 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3566 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3567 IEM_MC_ADVANCE_RIP();
3568 IEM_MC_END();
3569 }
3570 else
3571 {
3572 /*
3573 * Register, memory.
3574 */
3575 IEM_MC_BEGIN(3, 2);
3576 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3577 IEM_MC_LOCAL(RTUINT128U, uSrc);
3578 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3580
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 (bEvil) follows the ModRM encoding. */
3582 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3583 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3586
3587 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3588 IEM_MC_PREPARE_SSE_USAGE();
3589 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3590 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3591
3592 IEM_MC_ADVANCE_RIP();
3593 IEM_MC_END();
3594 }
3595 return VINF_SUCCESS;
3596}
3597
3598/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3599FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3600{
3601 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3603 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3604 {
3605 /*
3606 * Register, register.
3607 */
3608 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3610
3611 IEM_MC_BEGIN(3, 0);
3612 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3613 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3614 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3615 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3616 IEM_MC_PREPARE_SSE_USAGE();
3617 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3618 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3619 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3620 IEM_MC_ADVANCE_RIP();
3621 IEM_MC_END();
3622 }
3623 else
3624 {
3625 /*
3626 * Register, memory.
3627 */
3628 IEM_MC_BEGIN(3, 2);
3629 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3630 IEM_MC_LOCAL(RTUINT128U, uSrc);
3631 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3633
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* The imm8 (bEvil) follows the ModRM encoding. */
3635 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3636 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3639
3640 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3641 IEM_MC_PREPARE_SSE_USAGE();
3642 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3643 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3644
3645 IEM_MC_ADVANCE_RIP();
3646 IEM_MC_END();
3647 }
3648 return VINF_SUCCESS;
3649}
3650
3651/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3652FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3653{
3654 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3656 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3657 {
3658 /*
3659 * Register, register.
3660 */
3661 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3663
3664 IEM_MC_BEGIN(3, 0);
3665 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3666 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3667 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3668 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3669 IEM_MC_PREPARE_SSE_USAGE();
3670 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3671 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3672 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3673 IEM_MC_ADVANCE_RIP();
3674 IEM_MC_END();
3675 }
3676 else
3677 {
3678 /*
3679 * Register, memory.
3680 */
3681 IEM_MC_BEGIN(3, 2);
3682 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3683 IEM_MC_LOCAL(RTUINT128U, uSrc);
3684 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3686
3687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3688 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3689 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3691 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3692
3693 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3694 IEM_MC_PREPARE_SSE_USAGE();
3695 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3696 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3697
3698 IEM_MC_ADVANCE_RIP();
3699 IEM_MC_END();
3700 }
3701 return VINF_SUCCESS;
3702}
3703
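/*
 * For reference: pshufd copies, for each destination dword, the source dword
 * selected by the corresponding 2-bit field of the immediate; pshufhw and
 * pshuflw do the same on just the high resp. low four words and pass the
 * other half through unchanged.  A minimal C sketch of the pshufd semantics
 * (hypothetical name, not built; the real iemAImpl_pshuf* helpers are
 * implemented separately):
 */
#if 0
DECLINLINE(void) iemSketch_pshufd(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc; /* copy first, the destination may alias the source */
    puDst->au32[0] = uSrc.au32[ bImm       & 3];
    puDst->au32[1] = uSrc.au32[(bImm >> 2) & 3];
    puDst->au32[2] = uSrc.au32[(bImm >> 4) & 3];
    puDst->au32[3] = uSrc.au32[(bImm >> 6) & 3];
}
#endif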
3704
3705/** Opcode 0x0f 0x71 11/2. */
3706FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3707
3708/** Opcode 0x66 0x0f 0x71 11/2. */
3709FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3710
3711/** Opcode 0x0f 0x71 11/4. */
3712FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3713
3714/** Opcode 0x66 0x0f 0x71 11/4. */
3715FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3716
3717/** Opcode 0x0f 0x71 11/6. */
3718FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3719
3720/** Opcode 0x66 0x0f 0x71 11/6. */
3721FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3722
3723
3724/**
3725 * Group 12 jump table for register variant.
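 * Indexed by the ModR/M reg field times four plus pVCpu->iem.s.idxPrefix,
 * i.e. four columns per /r value covering the no-prefix, 0x66, 0xF3 and
 * 0xF2 encodings in that order.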
3726 */
3727IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3728{
3729 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3730 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3731 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3732 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3733 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3734 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3735 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3736 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3737};
3738AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3739
3740
3741/** Opcode 0x0f 0x71. */
3742FNIEMOP_DEF(iemOp_Grp12)
3743{
3744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3745 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3746 /* register, register */
3747 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3748 + pVCpu->iem.s.idxPrefix], bRm);
3749 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3750}
3751
3752
3753/** Opcode 0x0f 0x72 11/2. */
3754FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3755
3756/** Opcode 0x66 0x0f 0x72 11/2. */
3757FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3758
3759/** Opcode 0x0f 0x72 11/4. */
3760FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3761
3762/** Opcode 0x66 0x0f 0x72 11/4. */
3763FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3764
3765/** Opcode 0x0f 0x72 11/6. */
3766FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3767
3768/** Opcode 0x66 0x0f 0x72 11/6. */
3769FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3770
3771
3772/**
3773 * Group 13 jump table for register variant.
3774 */
3775IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3776{
3777 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3778 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3779 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3780 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3781 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3782 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3783 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3784 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3785};
3786AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3787
3788/** Opcode 0x0f 0x72. */
3789FNIEMOP_DEF(iemOp_Grp13)
3790{
3791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3793 /* register, register */
3794 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3795 + pVCpu->iem.s.idxPrefix], bRm);
3796 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3797}
3798
3799
3800/** Opcode 0x0f 0x73 11/2. */
3801FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3802
3803/** Opcode 0x66 0x0f 0x73 11/2. */
3804FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3805
3806/** Opcode 0x66 0x0f 0x73 11/3. */
3807FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3808
3809/** Opcode 0x0f 0x73 11/6. */
3810FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3811
3812/** Opcode 0x66 0x0f 0x73 11/6. */
3813FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3814
3815/** Opcode 0x66 0x0f 0x73 11/7. */
3816FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
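/* Note: unlike the /2 and /6 bit-shift variants above, psrldq and pslldq
   shift the full 128-bit register by whole bytes and only exist in the
   0x66 prefixed form. */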
3817
3818/**
3819 * Group 14 jump table for register variant.
3820 */
3821IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3822{
3823 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3824 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3825 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3826 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3827 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3828 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3829 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3830 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3831};
3832AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3833
3834
3835/** Opcode 0x0f 0x73. */
3836FNIEMOP_DEF(iemOp_Grp14)
3837{
3838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3840 /* register, register */
3841 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3842 + pVCpu->iem.s.idxPrefix], bRm);
3843 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3844}
3845
3846
3847/**
3848 * Common worker for MMX instructions on the form:
3849 * pxxx mm1, mm2/mem64
3850 */
3851FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3852{
3853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3854 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3855 {
3856 /*
3857 * Register, register.
3858 */
3859 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3860 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862 IEM_MC_BEGIN(2, 0);
3863 IEM_MC_ARG(uint64_t *, pDst, 0);
3864 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3865 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3866 IEM_MC_PREPARE_FPU_USAGE();
3867 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3868 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3869 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3870 IEM_MC_ADVANCE_RIP();
3871 IEM_MC_END();
3872 }
3873 else
3874 {
3875 /*
3876 * Register, memory.
3877 */
3878 IEM_MC_BEGIN(2, 2);
3879 IEM_MC_ARG(uint64_t *, pDst, 0);
3880 IEM_MC_LOCAL(uint64_t, uSrc);
3881 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3883
3884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3887 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3888
3889 IEM_MC_PREPARE_FPU_USAGE();
3890 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3891 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3892
3893 IEM_MC_ADVANCE_RIP();
3894 IEM_MC_END();
3895 }
3896 return VINF_SUCCESS;
3897}
3898
3899
3900/**
3901 * Common worker for SSE2 instructions on the forms:
3902 * pxxx xmm1, xmm2/mem128
3903 *
3904 * Proper alignment of the 128-bit operand is enforced.
3905 * Exceptions type 4. SSE2 cpuid checks.
3906 */
3907FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3908{
3909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3911 {
3912 /*
3913 * Register, register.
3914 */
3915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3916 IEM_MC_BEGIN(2, 0);
3917 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3918 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3919 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3920 IEM_MC_PREPARE_SSE_USAGE();
3921 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3922 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3923 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3924 IEM_MC_ADVANCE_RIP();
3925 IEM_MC_END();
3926 }
3927 else
3928 {
3929 /*
3930 * Register, memory.
3931 */
3932 IEM_MC_BEGIN(2, 2);
3933 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3934 IEM_MC_LOCAL(RTUINT128U, uSrc);
3935 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3937
3938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3940 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3941 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3942
3943 IEM_MC_PREPARE_SSE_USAGE();
3944 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3945 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3946
3947 IEM_MC_ADVANCE_RIP();
3948 IEM_MC_END();
3949 }
3950 return VINF_SUCCESS;
3951}
3952
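/*
 * For reference, the kind of helper the two workers above dispatch to: the
 * pcmpeq* family compares element-wise and sets each destination element to
 * all ones on equality, zero otherwise.  A minimal sketch of the 64-bit
 * (MMX) pcmpeqb case (hypothetical name, not built; the real helpers behind
 * g_iemAImpl_pcmpeq* are implemented separately):
 */
#if 0
static void iemSketch_pcmpeqb_u64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint8_t       *pbDst = (uint8_t *)puDst;
    uint8_t const *pbSrc = (uint8_t const *)puSrc;
    for (unsigned i = 0; i < 8; i++)
        pbDst[i] = pbDst[i] == pbSrc[i] ? UINT8_C(0xff) : UINT8_C(0x00);
}
#endif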
3953
3954/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3955FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3956{
3957 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3958 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3959}
3960
3961/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3962FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3963{
3964 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3965 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3966}
3967
3968/* Opcode 0xf3 0x0f 0x74 - invalid */
3969/* Opcode 0xf2 0x0f 0x74 - invalid */
3970
3971
3972/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3973FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3974{
3975 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3976 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3977}
3978
3979/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3980FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3981{
3982 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3983 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3984}
3985
3986/* Opcode 0xf3 0x0f 0x75 - invalid */
3987/* Opcode 0xf2 0x0f 0x75 - invalid */
3988
3989
3990/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3991FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3992{
3993 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3994 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3995}
3996
3997/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3998FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3999{
4000 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4001 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4002}
4003
4004/* Opcode 0xf3 0x0f 0x76 - invalid */
4005/* Opcode 0xf2 0x0f 0x76 - invalid */
4006
4007
4008/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4009FNIEMOP_STUB(iemOp_emms);
4010/* Opcode 0x66 0x0f 0x77 - invalid */
4011/* Opcode 0xf3 0x0f 0x77 - invalid */
4012/* Opcode 0xf2 0x0f 0x77 - invalid */
4013
4014/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4015FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4016/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4017FNIEMOP_STUB(iemOp_AmdGrp17);
4018/* Opcode 0xf3 0x0f 0x78 - invalid */
4019/* Opcode 0xf2 0x0f 0x78 - invalid */
4020
4021/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4022FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4023/* Opcode 0x66 0x0f 0x79 - invalid */
4024/* Opcode 0xf3 0x0f 0x79 - invalid */
4025/* Opcode 0xf2 0x0f 0x79 - invalid */
4026
4027/* Opcode 0x0f 0x7a - invalid */
4028/* Opcode 0x66 0x0f 0x7a - invalid */
4029/* Opcode 0xf3 0x0f 0x7a - invalid */
4030/* Opcode 0xf2 0x0f 0x7a - invalid */
4031
4032/* Opcode 0x0f 0x7b - invalid */
4033/* Opcode 0x66 0x0f 0x7b - invalid */
4034/* Opcode 0xf3 0x0f 0x7b - invalid */
4035/* Opcode 0xf2 0x0f 0x7b - invalid */
4036
4037/* Opcode 0x0f 0x7c - invalid */
4038/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4039FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4040/* Opcode 0xf3 0x0f 0x7c - invalid */
4041/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4042FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4043
4044/* Opcode 0x0f 0x7d - invalid */
4045/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4046FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4047/* Opcode 0xf3 0x0f 0x7d - invalid */
4048/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4049FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4050
4051
4052/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4053FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4054{
4055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4056 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4057 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4058 else
4059 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4060 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4061 {
4062 /* greg, MMX */
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4064 IEM_MC_BEGIN(0, 1);
4065 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4066 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4067 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4068 {
4069 IEM_MC_LOCAL(uint64_t, u64Tmp);
4070 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4071 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4072 }
4073 else
4074 {
4075 IEM_MC_LOCAL(uint32_t, u32Tmp);
4076 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4077 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4078 }
4079 IEM_MC_ADVANCE_RIP();
4080 IEM_MC_END();
4081 }
4082 else
4083 {
4084 /* [mem], MMX */
4085 IEM_MC_BEGIN(0, 2);
4086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4087 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows the ModR/M */
4089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4090 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4092 {
4093 IEM_MC_LOCAL(uint64_t, u64Tmp);
4094 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4095 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4096 }
4097 else
4098 {
4099 IEM_MC_LOCAL(uint32_t, u32Tmp);
4100 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4101 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4102 }
4103 IEM_MC_ADVANCE_RIP();
4104 IEM_MC_END();
4105 }
4106 return VINF_SUCCESS;
4107}
4108
4109/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4110FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4111{
4112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4113 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4114 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4115 else
4116 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4118 {
4119 /* greg, XMM */
4120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4121 IEM_MC_BEGIN(0, 1);
4122 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4123 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4124 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4125 {
4126 IEM_MC_LOCAL(uint64_t, u64Tmp);
4127 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4128 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4129 }
4130 else
4131 {
4132 IEM_MC_LOCAL(uint32_t, u32Tmp);
4133 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4134 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4135 }
4136 IEM_MC_ADVANCE_RIP();
4137 IEM_MC_END();
4138 }
4139 else
4140 {
4141 /* [mem], XMM */
4142 IEM_MC_BEGIN(0, 2);
4143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4144 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4147 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4148 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4149 {
4150 IEM_MC_LOCAL(uint64_t, u64Tmp);
4151 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4152 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4153 }
4154 else
4155 {
4156 IEM_MC_LOCAL(uint32_t, u32Tmp);
4157 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4158 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4159 }
4160 IEM_MC_ADVANCE_RIP();
4161 IEM_MC_END();
4162 }
4163 return VINF_SUCCESS;
4164}
4165
4166/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
4167FNIEMOP_STUB(iemOp_movq_Vq_Wq);
4168/* Opcode 0xf2 0x0f 0x7e - invalid */
4169
4170
4171/** Opcode 0x0f 0x7f - movq Qq, Pq */
4172FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4173{
4174 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4176 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4177 {
4178 /*
4179 * Register, register.
4180 */
4181 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4182 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4184 IEM_MC_BEGIN(0, 1);
4185 IEM_MC_LOCAL(uint64_t, u64Tmp);
4186 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4187 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4188 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4189 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4190 IEM_MC_ADVANCE_RIP();
4191 IEM_MC_END();
4192 }
4193 else
4194 {
4195 /*
4196 * Register, memory.
4197 */
4198 IEM_MC_BEGIN(0, 2);
4199 IEM_MC_LOCAL(uint64_t, u64Tmp);
4200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4201
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4205 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4206
4207 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4208 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4209
4210 IEM_MC_ADVANCE_RIP();
4211 IEM_MC_END();
4212 }
4213 return VINF_SUCCESS;
4214}
4215
4216/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4217FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4218{
4219 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4222 {
4223 /*
4224 * Register, register.
4225 */
4226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4227 IEM_MC_BEGIN(0, 0);
4228 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4230 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4231 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4232 IEM_MC_ADVANCE_RIP();
4233 IEM_MC_END();
4234 }
4235 else
4236 {
4237 /*
4238 * Register, memory.
4239 */
4240 IEM_MC_BEGIN(0, 2);
4241 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4243
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4246 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4248
4249 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4250 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4251
4252 IEM_MC_ADVANCE_RIP();
4253 IEM_MC_END();
4254 }
4255 return VINF_SUCCESS;
4256}
4257
4258/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4259FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4260{
4261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4262 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4263 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4264 {
4265 /*
4266 * Register, register.
4267 */
4268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4269 IEM_MC_BEGIN(0, 0);
4270 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4271 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4272 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4273 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4274 IEM_MC_ADVANCE_RIP();
4275 IEM_MC_END();
4276 }
4277 else
4278 {
4279 /*
4280 * Register, memory.
4281 */
4282 IEM_MC_BEGIN(0, 2);
4283 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4285
4286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4288 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4289 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4290
4291 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4292 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4293
4294 IEM_MC_ADVANCE_RIP();
4295 IEM_MC_END();
4296 }
4297 return VINF_SUCCESS;
4298}
4299
4300/* Opcode 0xf2 0x0f 0x7f - invalid */
4301
4302
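/*
 * The long-form Jcc instructions (opcodes 0x0f 0x80 thru 0x0f 0x8f) below
 * all follow the same pattern: fetch a signed 16-bit or 32-bit displacement
 * according to the effective operand size (64-bit mode defaults to the
 * 32-bit displacement, sign-extended), test the relevant EFLAGS bits, and
 * either branch relative to the next instruction or simply advance RIP.
 */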
4303
4304/** Opcode 0x0f 0x80. */
4305FNIEMOP_DEF(iemOp_jo_Jv)
4306{
4307 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4308 IEMOP_HLP_MIN_386();
4309 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4310 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4311 {
4312 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314
4315 IEM_MC_BEGIN(0, 0);
4316 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4317 IEM_MC_REL_JMP_S16(i16Imm);
4318 } IEM_MC_ELSE() {
4319 IEM_MC_ADVANCE_RIP();
4320 } IEM_MC_ENDIF();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4327
4328 IEM_MC_BEGIN(0, 0);
4329 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4330 IEM_MC_REL_JMP_S32(i32Imm);
4331 } IEM_MC_ELSE() {
4332 IEM_MC_ADVANCE_RIP();
4333 } IEM_MC_ENDIF();
4334 IEM_MC_END();
4335 }
4336 return VINF_SUCCESS;
4337}
4338
4339
4340/** Opcode 0x0f 0x81. */
4341FNIEMOP_DEF(iemOp_jno_Jv)
4342{
4343 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4344 IEMOP_HLP_MIN_386();
4345 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4346 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4347 {
4348 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4350
4351 IEM_MC_BEGIN(0, 0);
4352 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4353 IEM_MC_ADVANCE_RIP();
4354 } IEM_MC_ELSE() {
4355 IEM_MC_REL_JMP_S16(i16Imm);
4356 } IEM_MC_ENDIF();
4357 IEM_MC_END();
4358 }
4359 else
4360 {
4361 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4363
4364 IEM_MC_BEGIN(0, 0);
4365 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4366 IEM_MC_ADVANCE_RIP();
4367 } IEM_MC_ELSE() {
4368 IEM_MC_REL_JMP_S32(i32Imm);
4369 } IEM_MC_ENDIF();
4370 IEM_MC_END();
4371 }
4372 return VINF_SUCCESS;
4373}
4374
4375
4376/** Opcode 0x0f 0x82. */
4377FNIEMOP_DEF(iemOp_jc_Jv)
4378{
4379 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4380 IEMOP_HLP_MIN_386();
4381 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4382 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4383 {
4384 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4386
4387 IEM_MC_BEGIN(0, 0);
4388 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4389 IEM_MC_REL_JMP_S16(i16Imm);
4390 } IEM_MC_ELSE() {
4391 IEM_MC_ADVANCE_RIP();
4392 } IEM_MC_ENDIF();
4393 IEM_MC_END();
4394 }
4395 else
4396 {
4397 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4399
4400 IEM_MC_BEGIN(0, 0);
4401 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4402 IEM_MC_REL_JMP_S32(i32Imm);
4403 } IEM_MC_ELSE() {
4404 IEM_MC_ADVANCE_RIP();
4405 } IEM_MC_ENDIF();
4406 IEM_MC_END();
4407 }
4408 return VINF_SUCCESS;
4409}
4410
4411
4412/** Opcode 0x0f 0x83. */
4413FNIEMOP_DEF(iemOp_jnc_Jv)
4414{
4415 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4416 IEMOP_HLP_MIN_386();
4417 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4418 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4419 {
4420 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4422
4423 IEM_MC_BEGIN(0, 0);
4424 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4425 IEM_MC_ADVANCE_RIP();
4426 } IEM_MC_ELSE() {
4427 IEM_MC_REL_JMP_S16(i16Imm);
4428 } IEM_MC_ENDIF();
4429 IEM_MC_END();
4430 }
4431 else
4432 {
4433 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4435
4436 IEM_MC_BEGIN(0, 0);
4437 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4438 IEM_MC_ADVANCE_RIP();
4439 } IEM_MC_ELSE() {
4440 IEM_MC_REL_JMP_S32(i32Imm);
4441 } IEM_MC_ENDIF();
4442 IEM_MC_END();
4443 }
4444 return VINF_SUCCESS;
4445}
4446
4447
4448/** Opcode 0x0f 0x84. */
4449FNIEMOP_DEF(iemOp_je_Jv)
4450{
4451 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4452 IEMOP_HLP_MIN_386();
4453 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4454 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4455 {
4456 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4458
4459 IEM_MC_BEGIN(0, 0);
4460 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4461 IEM_MC_REL_JMP_S16(i16Imm);
4462 } IEM_MC_ELSE() {
4463 IEM_MC_ADVANCE_RIP();
4464 } IEM_MC_ENDIF();
4465 IEM_MC_END();
4466 }
4467 else
4468 {
4469 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4471
4472 IEM_MC_BEGIN(0, 0);
4473 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4474 IEM_MC_REL_JMP_S32(i32Imm);
4475 } IEM_MC_ELSE() {
4476 IEM_MC_ADVANCE_RIP();
4477 } IEM_MC_ENDIF();
4478 IEM_MC_END();
4479 }
4480 return VINF_SUCCESS;
4481}
4482
4483
4484/** Opcode 0x0f 0x85. */
4485FNIEMOP_DEF(iemOp_jne_Jv)
4486{
4487 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4488 IEMOP_HLP_MIN_386();
4489 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4490 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4491 {
4492 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4494
4495 IEM_MC_BEGIN(0, 0);
4496 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4497 IEM_MC_ADVANCE_RIP();
4498 } IEM_MC_ELSE() {
4499 IEM_MC_REL_JMP_S16(i16Imm);
4500 } IEM_MC_ENDIF();
4501 IEM_MC_END();
4502 }
4503 else
4504 {
4505 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4507
4508 IEM_MC_BEGIN(0, 0);
4509 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4510 IEM_MC_ADVANCE_RIP();
4511 } IEM_MC_ELSE() {
4512 IEM_MC_REL_JMP_S32(i32Imm);
4513 } IEM_MC_ENDIF();
4514 IEM_MC_END();
4515 }
4516 return VINF_SUCCESS;
4517}
4518
4519
4520/** Opcode 0x0f 0x86. */
4521FNIEMOP_DEF(iemOp_jbe_Jv)
4522{
4523 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4524 IEMOP_HLP_MIN_386();
4525 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4526 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4527 {
4528 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4530
4531 IEM_MC_BEGIN(0, 0);
4532 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4533 IEM_MC_REL_JMP_S16(i16Imm);
4534 } IEM_MC_ELSE() {
4535 IEM_MC_ADVANCE_RIP();
4536 } IEM_MC_ENDIF();
4537 IEM_MC_END();
4538 }
4539 else
4540 {
4541 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4543
4544 IEM_MC_BEGIN(0, 0);
4545 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4546 IEM_MC_REL_JMP_S32(i32Imm);
4547 } IEM_MC_ELSE() {
4548 IEM_MC_ADVANCE_RIP();
4549 } IEM_MC_ENDIF();
4550 IEM_MC_END();
4551 }
4552 return VINF_SUCCESS;
4553}
4554
4555
4556/** Opcode 0x0f 0x87. */
4557FNIEMOP_DEF(iemOp_jnbe_Jv)
4558{
4559 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4560 IEMOP_HLP_MIN_386();
4561 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4562 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4563 {
4564 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566
4567 IEM_MC_BEGIN(0, 0);
4568 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4569 IEM_MC_ADVANCE_RIP();
4570 } IEM_MC_ELSE() {
4571 IEM_MC_REL_JMP_S16(i16Imm);
4572 } IEM_MC_ENDIF();
4573 IEM_MC_END();
4574 }
4575 else
4576 {
4577 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4579
4580 IEM_MC_BEGIN(0, 0);
4581 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4582 IEM_MC_ADVANCE_RIP();
4583 } IEM_MC_ELSE() {
4584 IEM_MC_REL_JMP_S32(i32Imm);
4585 } IEM_MC_ENDIF();
4586 IEM_MC_END();
4587 }
4588 return VINF_SUCCESS;
4589}
4590
4591
4592/** Opcode 0x0f 0x88. */
4593FNIEMOP_DEF(iemOp_js_Jv)
4594{
4595 IEMOP_MNEMONIC(js_Jv, "js Jv");
4596 IEMOP_HLP_MIN_386();
4597 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4598 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4599 {
4600 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4602
4603 IEM_MC_BEGIN(0, 0);
4604 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4605 IEM_MC_REL_JMP_S16(i16Imm);
4606 } IEM_MC_ELSE() {
4607 IEM_MC_ADVANCE_RIP();
4608 } IEM_MC_ENDIF();
4609 IEM_MC_END();
4610 }
4611 else
4612 {
4613 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4615
4616 IEM_MC_BEGIN(0, 0);
4617 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4618 IEM_MC_REL_JMP_S32(i32Imm);
4619 } IEM_MC_ELSE() {
4620 IEM_MC_ADVANCE_RIP();
4621 } IEM_MC_ENDIF();
4622 IEM_MC_END();
4623 }
4624 return VINF_SUCCESS;
4625}
4626
4627
4628/** Opcode 0x0f 0x89. */
4629FNIEMOP_DEF(iemOp_jns_Jv)
4630{
4631 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4632 IEMOP_HLP_MIN_386();
4633 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4634 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4635 {
4636 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4638
4639 IEM_MC_BEGIN(0, 0);
4640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4641 IEM_MC_ADVANCE_RIP();
4642 } IEM_MC_ELSE() {
4643 IEM_MC_REL_JMP_S16(i16Imm);
4644 } IEM_MC_ENDIF();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4651
4652 IEM_MC_BEGIN(0, 0);
4653 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4654 IEM_MC_ADVANCE_RIP();
4655 } IEM_MC_ELSE() {
4656 IEM_MC_REL_JMP_S32(i32Imm);
4657 } IEM_MC_ENDIF();
4658 IEM_MC_END();
4659 }
4660 return VINF_SUCCESS;
4661}
4662
4663
4664/** Opcode 0x0f 0x8a. */
4665FNIEMOP_DEF(iemOp_jp_Jv)
4666{
4667 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4668 IEMOP_HLP_MIN_386();
4669 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4670 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4671 {
4672 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4674
4675 IEM_MC_BEGIN(0, 0);
4676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4677 IEM_MC_REL_JMP_S16(i16Imm);
4678 } IEM_MC_ELSE() {
4679 IEM_MC_ADVANCE_RIP();
4680 } IEM_MC_ENDIF();
4681 IEM_MC_END();
4682 }
4683 else
4684 {
4685 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4687
4688 IEM_MC_BEGIN(0, 0);
4689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4690 IEM_MC_REL_JMP_S32(i32Imm);
4691 } IEM_MC_ELSE() {
4692 IEM_MC_ADVANCE_RIP();
4693 } IEM_MC_ENDIF();
4694 IEM_MC_END();
4695 }
4696 return VINF_SUCCESS;
4697}
4698
4699
4700/** Opcode 0x0f 0x8b. */
4701FNIEMOP_DEF(iemOp_jnp_Jv)
4702{
4703 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4704 IEMOP_HLP_MIN_386();
4705 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4706 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4707 {
4708 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4710
4711 IEM_MC_BEGIN(0, 0);
4712 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4713 IEM_MC_ADVANCE_RIP();
4714 } IEM_MC_ELSE() {
4715 IEM_MC_REL_JMP_S16(i16Imm);
4716 } IEM_MC_ENDIF();
4717 IEM_MC_END();
4718 }
4719 else
4720 {
4721 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4723
4724 IEM_MC_BEGIN(0, 0);
4725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4726 IEM_MC_ADVANCE_RIP();
4727 } IEM_MC_ELSE() {
4728 IEM_MC_REL_JMP_S32(i32Imm);
4729 } IEM_MC_ENDIF();
4730 IEM_MC_END();
4731 }
4732 return VINF_SUCCESS;
4733}
4734
4735
4736/** Opcode 0x0f 0x8c. */
4737FNIEMOP_DEF(iemOp_jl_Jv)
4738{
4739 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4740 IEMOP_HLP_MIN_386();
4741 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4742 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4743 {
4744 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4746
4747 IEM_MC_BEGIN(0, 0);
4748 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4749 IEM_MC_REL_JMP_S16(i16Imm);
4750 } IEM_MC_ELSE() {
4751 IEM_MC_ADVANCE_RIP();
4752 } IEM_MC_ENDIF();
4753 IEM_MC_END();
4754 }
4755 else
4756 {
4757 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4759
4760 IEM_MC_BEGIN(0, 0);
4761 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4762 IEM_MC_REL_JMP_S32(i32Imm);
4763 } IEM_MC_ELSE() {
4764 IEM_MC_ADVANCE_RIP();
4765 } IEM_MC_ENDIF();
4766 IEM_MC_END();
4767 }
4768 return VINF_SUCCESS;
4769}
4770
4771
4772/** Opcode 0x0f 0x8d. */
4773FNIEMOP_DEF(iemOp_jnl_Jv)
4774{
4775 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4776 IEMOP_HLP_MIN_386();
4777 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4778 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4779 {
4780 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782
4783 IEM_MC_BEGIN(0, 0);
4784 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4785 IEM_MC_ADVANCE_RIP();
4786 } IEM_MC_ELSE() {
4787 IEM_MC_REL_JMP_S16(i16Imm);
4788 } IEM_MC_ENDIF();
4789 IEM_MC_END();
4790 }
4791 else
4792 {
4793 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4795
4796 IEM_MC_BEGIN(0, 0);
4797 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4798 IEM_MC_ADVANCE_RIP();
4799 } IEM_MC_ELSE() {
4800 IEM_MC_REL_JMP_S32(i32Imm);
4801 } IEM_MC_ENDIF();
4802 IEM_MC_END();
4803 }
4804 return VINF_SUCCESS;
4805}
4806
4807
4808/** Opcode 0x0f 0x8e. */
4809FNIEMOP_DEF(iemOp_jle_Jv)
4810{
4811 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4812 IEMOP_HLP_MIN_386();
4813 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4814 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4815 {
4816 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4818
4819 IEM_MC_BEGIN(0, 0);
4820 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4821 IEM_MC_REL_JMP_S16(i16Imm);
4822 } IEM_MC_ELSE() {
4823 IEM_MC_ADVANCE_RIP();
4824 } IEM_MC_ENDIF();
4825 IEM_MC_END();
4826 }
4827 else
4828 {
4829 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4831
4832 IEM_MC_BEGIN(0, 0);
4833 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4834 IEM_MC_REL_JMP_S32(i32Imm);
4835 } IEM_MC_ELSE() {
4836 IEM_MC_ADVANCE_RIP();
4837 } IEM_MC_ENDIF();
4838 IEM_MC_END();
4839 }
4840 return VINF_SUCCESS;
4841}
4842
4843
4844/** Opcode 0x0f 0x8f. */
4845FNIEMOP_DEF(iemOp_jnle_Jv)
4846{
4847 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4848 IEMOP_HLP_MIN_386();
4849 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4850 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4851 {
4852 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4854
4855 IEM_MC_BEGIN(0, 0);
4856 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4857 IEM_MC_ADVANCE_RIP();
4858 } IEM_MC_ELSE() {
4859 IEM_MC_REL_JMP_S16(i16Imm);
4860 } IEM_MC_ENDIF();
4861 IEM_MC_END();
4862 }
4863 else
4864 {
4865 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4867
4868 IEM_MC_BEGIN(0, 0);
4869 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4870 IEM_MC_ADVANCE_RIP();
4871 } IEM_MC_ELSE() {
4872 IEM_MC_REL_JMP_S32(i32Imm);
4873 } IEM_MC_ENDIF();
4874 IEM_MC_END();
4875 }
4876 return VINF_SUCCESS;
4877}
4878
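/*
 * The Setcc instructions (opcodes 0x0f 0x90 thru 0x0f 0x9f) below share a
 * single structure: evaluate the condition and store one byte, 1 or 0, to
 * the byte-sized register or memory operand.  The ModR/M reg field plays no
 * part in the operation; see the @todo notes regarding how real CPUs treat
 * it.
 */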
4879
4880/** Opcode 0x0f 0x90. */
4881FNIEMOP_DEF(iemOp_seto_Eb)
4882{
4883 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4884 IEMOP_HLP_MIN_386();
4885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4886
4887 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4888 * any way. AMD says it's "unused", whatever that means. We're
4889 * ignoring for now. */
4890 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4891 {
4892 /* register target */
4893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894 IEM_MC_BEGIN(0, 0);
4895 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4896 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4897 } IEM_MC_ELSE() {
4898 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4899 } IEM_MC_ENDIF();
4900 IEM_MC_ADVANCE_RIP();
4901 IEM_MC_END();
4902 }
4903 else
4904 {
4905 /* memory target */
4906 IEM_MC_BEGIN(0, 1);
4907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4910 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4911 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4912 } IEM_MC_ELSE() {
4913 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4914 } IEM_MC_ENDIF();
4915 IEM_MC_ADVANCE_RIP();
4916 IEM_MC_END();
4917 }
4918 return VINF_SUCCESS;
4919}
4920
4921
4922/** Opcode 0x0f 0x91. */
4923FNIEMOP_DEF(iemOp_setno_Eb)
4924{
4925 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4926 IEMOP_HLP_MIN_386();
4927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4928
4929 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4930 * any way. AMD says it's "unused", whatever that means. We're
4931 * ignoring for now. */
4932 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4933 {
4934 /* register target */
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4936 IEM_MC_BEGIN(0, 0);
4937 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4938 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4939 } IEM_MC_ELSE() {
4940 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4941 } IEM_MC_ENDIF();
4942 IEM_MC_ADVANCE_RIP();
4943 IEM_MC_END();
4944 }
4945 else
4946 {
4947 /* memory target */
4948 IEM_MC_BEGIN(0, 1);
4949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4953 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4954 } IEM_MC_ELSE() {
4955 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4956 } IEM_MC_ENDIF();
4957 IEM_MC_ADVANCE_RIP();
4958 IEM_MC_END();
4959 }
4960 return VINF_SUCCESS;
4961}
4962
4963
4964/** Opcode 0x0f 0x92. */
4965FNIEMOP_DEF(iemOp_setc_Eb)
4966{
4967 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4968 IEMOP_HLP_MIN_386();
4969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4970
4971 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4972 * any way. AMD says it's "unused", whatever that means. We're
4973 * ignoring for now. */
4974 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4975 {
4976 /* register target */
4977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4978 IEM_MC_BEGIN(0, 0);
4979 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4980 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4981 } IEM_MC_ELSE() {
4982 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4983 } IEM_MC_ENDIF();
4984 IEM_MC_ADVANCE_RIP();
4985 IEM_MC_END();
4986 }
4987 else
4988 {
4989 /* memory target */
4990 IEM_MC_BEGIN(0, 1);
4991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4994 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4995 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4996 } IEM_MC_ELSE() {
4997 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4998 } IEM_MC_ENDIF();
4999 IEM_MC_ADVANCE_RIP();
5000 IEM_MC_END();
5001 }
5002 return VINF_SUCCESS;
5003}
5004
5005
5006/** Opcode 0x0f 0x93. */
5007FNIEMOP_DEF(iemOp_setnc_Eb)
5008{
5009 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5010 IEMOP_HLP_MIN_386();
5011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5012
5013 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5014 * any way. AMD says it's "unused", whatever that means. We're
5015 * ignoring for now. */
5016 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5017 {
5018 /* register target */
5019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5020 IEM_MC_BEGIN(0, 0);
5021 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5022 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5023 } IEM_MC_ELSE() {
5024 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5025 } IEM_MC_ENDIF();
5026 IEM_MC_ADVANCE_RIP();
5027 IEM_MC_END();
5028 }
5029 else
5030 {
5031 /* memory target */
5032 IEM_MC_BEGIN(0, 1);
5033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5036 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5037 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5038 } IEM_MC_ELSE() {
5039 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5040 } IEM_MC_ENDIF();
5041 IEM_MC_ADVANCE_RIP();
5042 IEM_MC_END();
5043 }
5044 return VINF_SUCCESS;
5045}
5046
5047
5048/** Opcode 0x0f 0x94. */
5049FNIEMOP_DEF(iemOp_sete_Eb)
5050{
5051 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5052 IEMOP_HLP_MIN_386();
5053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5054
5055 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5056 * any way. AMD says it's "unused", whatever that means. We're
5057 * ignoring for now. */
5058 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5059 {
5060 /* register target */
5061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5062 IEM_MC_BEGIN(0, 0);
5063 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5064 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5065 } IEM_MC_ELSE() {
5066 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5067 } IEM_MC_ENDIF();
5068 IEM_MC_ADVANCE_RIP();
5069 IEM_MC_END();
5070 }
5071 else
5072 {
5073 /* memory target */
5074 IEM_MC_BEGIN(0, 1);
5075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5078 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5079 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5080 } IEM_MC_ELSE() {
5081 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5082 } IEM_MC_ENDIF();
5083 IEM_MC_ADVANCE_RIP();
5084 IEM_MC_END();
5085 }
5086 return VINF_SUCCESS;
5087}
5088
5089
5090/** Opcode 0x0f 0x95. */
5091FNIEMOP_DEF(iemOp_setne_Eb)
5092{
5093 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5094 IEMOP_HLP_MIN_386();
5095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5096
5097 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5098 * any way. AMD says it's "unused", whatever that means. We're
5099 * ignoring for now. */
5100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5101 {
5102 /* register target */
5103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5104 IEM_MC_BEGIN(0, 0);
5105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5107 } IEM_MC_ELSE() {
5108 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5109 } IEM_MC_ENDIF();
5110 IEM_MC_ADVANCE_RIP();
5111 IEM_MC_END();
5112 }
5113 else
5114 {
5115 /* memory target */
5116 IEM_MC_BEGIN(0, 1);
5117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5122 } IEM_MC_ELSE() {
5123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5124 } IEM_MC_ENDIF();
5125 IEM_MC_ADVANCE_RIP();
5126 IEM_MC_END();
5127 }
5128 return VINF_SUCCESS;
5129}
5130
5131
5132/** Opcode 0x0f 0x96. */
5133FNIEMOP_DEF(iemOp_setbe_Eb)
5134{
5135 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5136 IEMOP_HLP_MIN_386();
5137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5138
5139 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5140 * any way. AMD says it's "unused", whatever that means. We're
5141 * ignoring for now. */
5142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5143 {
5144 /* register target */
5145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5146 IEM_MC_BEGIN(0, 0);
5147 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5149 } IEM_MC_ELSE() {
5150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5151 } IEM_MC_ENDIF();
5152 IEM_MC_ADVANCE_RIP();
5153 IEM_MC_END();
5154 }
5155 else
5156 {
5157 /* memory target */
5158 IEM_MC_BEGIN(0, 1);
5159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5162 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5164 } IEM_MC_ELSE() {
5165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5166 } IEM_MC_ENDIF();
5167 IEM_MC_ADVANCE_RIP();
5168 IEM_MC_END();
5169 }
5170 return VINF_SUCCESS;
5171}
5172
5173
5174/** Opcode 0x0f 0x97. */
5175FNIEMOP_DEF(iemOp_setnbe_Eb)
5176{
5177 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5178 IEMOP_HLP_MIN_386();
5179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5180
5181 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5182 * any way. AMD says it's "unused", whatever that means. We're
5183 * ignoring for now. */
5184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185 {
5186 /* register target */
5187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5188 IEM_MC_BEGIN(0, 0);
5189 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5191 } IEM_MC_ELSE() {
5192 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5193 } IEM_MC_ENDIF();
5194 IEM_MC_ADVANCE_RIP();
5195 IEM_MC_END();
5196 }
5197 else
5198 {
5199 /* memory target */
5200 IEM_MC_BEGIN(0, 1);
5201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5204 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5206 } IEM_MC_ELSE() {
5207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5208 } IEM_MC_ENDIF();
5209 IEM_MC_ADVANCE_RIP();
5210 IEM_MC_END();
5211 }
5212 return VINF_SUCCESS;
5213}
5214
5215
5216/** Opcode 0x0f 0x98. */
5217FNIEMOP_DEF(iemOp_sets_Eb)
5218{
5219 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5220 IEMOP_HLP_MIN_386();
5221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5222
5223 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5224 * any way. AMD says it's "unused", whatever that means. We're
5225 * ignoring for now. */
5226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5227 {
5228 /* register target */
5229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5230 IEM_MC_BEGIN(0, 0);
5231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5232 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5233 } IEM_MC_ELSE() {
5234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5235 } IEM_MC_ENDIF();
5236 IEM_MC_ADVANCE_RIP();
5237 IEM_MC_END();
5238 }
5239 else
5240 {
5241 /* memory target */
5242 IEM_MC_BEGIN(0, 1);
5243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5248 } IEM_MC_ELSE() {
5249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5250 } IEM_MC_ENDIF();
5251 IEM_MC_ADVANCE_RIP();
5252 IEM_MC_END();
5253 }
5254 return VINF_SUCCESS;
5255}
5256
5257
5258/** Opcode 0x0f 0x99. */
5259FNIEMOP_DEF(iemOp_setns_Eb)
5260{
5261 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5262 IEMOP_HLP_MIN_386();
5263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5264
5265 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5266 * any way. AMD says it's "unused", whatever that means. We're
5267 * ignoring for now. */
5268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5269 {
5270 /* register target */
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272 IEM_MC_BEGIN(0, 0);
5273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5274 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5275 } IEM_MC_ELSE() {
5276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5277 } IEM_MC_ENDIF();
5278 IEM_MC_ADVANCE_RIP();
5279 IEM_MC_END();
5280 }
5281 else
5282 {
5283 /* memory target */
5284 IEM_MC_BEGIN(0, 1);
5285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5290 } IEM_MC_ELSE() {
5291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5292 } IEM_MC_ENDIF();
5293 IEM_MC_ADVANCE_RIP();
5294 IEM_MC_END();
5295 }
5296 return VINF_SUCCESS;
5297}
5298
5299
5300/** Opcode 0x0f 0x9a. */
5301FNIEMOP_DEF(iemOp_setp_Eb)
5302{
5303 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5304 IEMOP_HLP_MIN_386();
5305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5306
5307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5308 * any way. AMD says it's "unused", whatever that means. We're
5309 * ignoring for now. */
5310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5311 {
5312 /* register target */
5313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5314 IEM_MC_BEGIN(0, 0);
5315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5317 } IEM_MC_ELSE() {
5318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5319 } IEM_MC_ENDIF();
5320 IEM_MC_ADVANCE_RIP();
5321 IEM_MC_END();
5322 }
5323 else
5324 {
5325 /* memory target */
5326 IEM_MC_BEGIN(0, 1);
5327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5332 } IEM_MC_ELSE() {
5333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5334 } IEM_MC_ENDIF();
5335 IEM_MC_ADVANCE_RIP();
5336 IEM_MC_END();
5337 }
5338 return VINF_SUCCESS;
5339}
5340
5341
5342/** Opcode 0x0f 0x9b. */
5343FNIEMOP_DEF(iemOp_setnp_Eb)
5344{
5345 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5346 IEMOP_HLP_MIN_386();
5347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5348
5349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5350 * any way. AMD says it's "unused", whatever that means. We're
5351 * ignoring for now. */
5352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5353 {
5354 /* register target */
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356 IEM_MC_BEGIN(0, 0);
5357 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5358 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5359 } IEM_MC_ELSE() {
5360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5361 } IEM_MC_ENDIF();
5362 IEM_MC_ADVANCE_RIP();
5363 IEM_MC_END();
5364 }
5365 else
5366 {
5367 /* memory target */
5368 IEM_MC_BEGIN(0, 1);
5369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5372 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5374 } IEM_MC_ELSE() {
5375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5376 } IEM_MC_ENDIF();
5377 IEM_MC_ADVANCE_RIP();
5378 IEM_MC_END();
5379 }
5380 return VINF_SUCCESS;
5381}
5382
5383
5384/** Opcode 0x0f 0x9c. */
5385FNIEMOP_DEF(iemOp_setl_Eb)
5386{
5387 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5388 IEMOP_HLP_MIN_386();
5389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5390
5391 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5392 * any way. AMD says it's "unused", whatever that means. We're
5393 * ignoring for now. */
5394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5395 {
5396 /* register target */
5397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5398 IEM_MC_BEGIN(0, 0);
5399 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5401 } IEM_MC_ELSE() {
5402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5403 } IEM_MC_ENDIF();
5404 IEM_MC_ADVANCE_RIP();
5405 IEM_MC_END();
5406 }
5407 else
5408 {
5409 /* memory target */
5410 IEM_MC_BEGIN(0, 1);
5411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5414 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5416 } IEM_MC_ELSE() {
5417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5418 } IEM_MC_ENDIF();
5419 IEM_MC_ADVANCE_RIP();
5420 IEM_MC_END();
5421 }
5422 return VINF_SUCCESS;
5423}
5424
5425
5426/** Opcode 0x0f 0x9d. */
5427FNIEMOP_DEF(iemOp_setnl_Eb)
5428{
5429 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5430 IEMOP_HLP_MIN_386();
5431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5432
5433 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5434 * any way. AMD says it's "unused", whatever that means. We're
5435 * ignoring for now. */
5436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5437 {
5438 /* register target */
5439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5440 IEM_MC_BEGIN(0, 0);
5441 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5442 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5443 } IEM_MC_ELSE() {
5444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5445 } IEM_MC_ENDIF();
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 }
5449 else
5450 {
5451 /* memory target */
5452 IEM_MC_BEGIN(0, 1);
5453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5456 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5458 } IEM_MC_ELSE() {
5459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5460 } IEM_MC_ENDIF();
5461 IEM_MC_ADVANCE_RIP();
5462 IEM_MC_END();
5463 }
5464 return VINF_SUCCESS;
5465}
5466
5467
5468/** Opcode 0x0f 0x9e. */
5469FNIEMOP_DEF(iemOp_setle_Eb)
5470{
5471 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5472 IEMOP_HLP_MIN_386();
5473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5474
5475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5476 * any way. AMD says it's "unused", whatever that means. We're
5477 * ignoring it for now. */
5478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5479 {
5480 /* register target */
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482 IEM_MC_BEGIN(0, 0);
5483 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5484 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5485 } IEM_MC_ELSE() {
5486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5487 } IEM_MC_ENDIF();
5488 IEM_MC_ADVANCE_RIP();
5489 IEM_MC_END();
5490 }
5491 else
5492 {
5493 /* memory target */
5494 IEM_MC_BEGIN(0, 1);
5495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5498 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5500 } IEM_MC_ELSE() {
5501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5502 } IEM_MC_ENDIF();
5503 IEM_MC_ADVANCE_RIP();
5504 IEM_MC_END();
5505 }
5506 return VINF_SUCCESS;
5507}
5508
5509
5510/** Opcode 0x0f 0x9f. */
5511FNIEMOP_DEF(iemOp_setnle_Eb)
5512{
5513 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5514 IEMOP_HLP_MIN_386();
5515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5516
5517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5518 * any way. AMD says it's "unused", whatever that means. We're
5519 * ignoring it for now. */
5520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5521 {
5522 /* register target */
5523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5524 IEM_MC_BEGIN(0, 0);
5525 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5526 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5527 } IEM_MC_ELSE() {
5528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5529 } IEM_MC_ENDIF();
5530 IEM_MC_ADVANCE_RIP();
5531 IEM_MC_END();
5532 }
5533 else
5534 {
5535 /* memory target */
5536 IEM_MC_BEGIN(0, 1);
5537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5540 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5542 } IEM_MC_ELSE() {
5543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5544 } IEM_MC_ENDIF();
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 }
5548 return VINF_SUCCESS;
5549}
5550
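/*
 * For reference: the setl/setnl/setle/setnle workers above all reduce to
 * plain EFLAGS predicates.  A minimal standalone sketch of the two signed
 * conditions (hypothetical helpers, deliberately kept out of the build):
 */
#if 0
static bool iemExampleCondL(uint32_t fEFlags)   /* setl: SF != OF */
{
    return RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF);
}

static bool iemExampleCondLe(uint32_t fEFlags)  /* setle: ZF set, or SF != OF */
{
    return RT_BOOL(fEFlags & X86_EFL_ZF) || iemExampleCondL(fEFlags);
}
#endif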
5551
5552/**
5553 * Common 'push segment-register' helper.
5554 */
5555FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5556{
5557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5558 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5559 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5560
5561 switch (pVCpu->iem.s.enmEffOpSize)
5562 {
5563 case IEMMODE_16BIT:
5564 IEM_MC_BEGIN(0, 1);
5565 IEM_MC_LOCAL(uint16_t, u16Value);
5566 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5567 IEM_MC_PUSH_U16(u16Value);
5568 IEM_MC_ADVANCE_RIP();
5569 IEM_MC_END();
5570 break;
5571
5572 case IEMMODE_32BIT:
5573 IEM_MC_BEGIN(0, 1);
5574 IEM_MC_LOCAL(uint32_t, u32Value);
5575 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5576 IEM_MC_PUSH_U32_SREG(u32Value);
5577 IEM_MC_ADVANCE_RIP();
5578 IEM_MC_END();
5579 break;
5580
5581 case IEMMODE_64BIT:
5582 IEM_MC_BEGIN(0, 1);
5583 IEM_MC_LOCAL(uint64_t, u64Value);
5584 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5585 IEM_MC_PUSH_U64(u64Value);
5586 IEM_MC_ADVANCE_RIP();
5587 IEM_MC_END();
5588 break;
5589 }
5590
5591 return VINF_SUCCESS;
5592}
5593
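/*
 * Note that the 32-bit case uses IEM_MC_PUSH_U32_SREG rather than a plain
 * 32-bit push: when pushing a segment register with a 32-bit operand size,
 * CPUs are documented to either zero extend the selector or write only the
 * low word of the stack slot.  Rough sketch of the two store patterns
 * (hypothetical helpers, not built):
 */
#if 0
static void iemExamplePushSRegZx(uint8_t *pbSlot, uint16_t uSel)
{
    uint32_t const u32 = uSel;               /* zero extended, full 32-bit write */
    memcpy(pbSlot, &u32, sizeof(u32));
}

static void iemExamplePushSRegWordOnly(uint8_t *pbSlot, uint16_t uSel)
{
    memcpy(pbSlot, &uSel, sizeof(uSel));     /* 16-bit write, upper bytes untouched */
}
#endif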
5594
5595/** Opcode 0x0f 0xa0. */
5596FNIEMOP_DEF(iemOp_push_fs)
5597{
5598 IEMOP_MNEMONIC(push_fs, "push fs");
5599 IEMOP_HLP_MIN_386();
5600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5601 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5602}
5603
5604
5605/** Opcode 0x0f 0xa1. */
5606FNIEMOP_DEF(iemOp_pop_fs)
5607{
5608 IEMOP_MNEMONIC(pop_fs, "pop fs");
5609 IEMOP_HLP_MIN_386();
5610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5611 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5612}
5613
5614
5615/** Opcode 0x0f 0xa2. */
5616FNIEMOP_DEF(iemOp_cpuid)
5617{
5618 IEMOP_MNEMONIC(cpuid, "cpuid");
5619 IEMOP_HLP_MIN_486(); /* not all 486es. */
5620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5621 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5622}
5623
5624
5625/**
5626 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5627 * iemOp_bts_Ev_Gv.
5628 */
5629FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5630{
5631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5632 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5633
5634 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5635 {
5636 /* register destination. */
5637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5638 switch (pVCpu->iem.s.enmEffOpSize)
5639 {
5640 case IEMMODE_16BIT:
5641 IEM_MC_BEGIN(3, 0);
5642 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5643 IEM_MC_ARG(uint16_t, u16Src, 1);
5644 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5645
5646 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5647 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5648 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5649 IEM_MC_REF_EFLAGS(pEFlags);
5650 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5651
5652 IEM_MC_ADVANCE_RIP();
5653 IEM_MC_END();
5654 return VINF_SUCCESS;
5655
5656 case IEMMODE_32BIT:
5657 IEM_MC_BEGIN(3, 0);
5658 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5659 IEM_MC_ARG(uint32_t, u32Src, 1);
5660 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5661
5662 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5663 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5664 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5665 IEM_MC_REF_EFLAGS(pEFlags);
5666 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5667
5668 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5669 IEM_MC_ADVANCE_RIP();
5670 IEM_MC_END();
5671 return VINF_SUCCESS;
5672
5673 case IEMMODE_64BIT:
5674 IEM_MC_BEGIN(3, 0);
5675 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5676 IEM_MC_ARG(uint64_t, u64Src, 1);
5677 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5678
5679 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5680 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5681 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5682 IEM_MC_REF_EFLAGS(pEFlags);
5683 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5684
5685 IEM_MC_ADVANCE_RIP();
5686 IEM_MC_END();
5687 return VINF_SUCCESS;
5688
5689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5690 }
5691 }
5692 else
5693 {
5694 /* memory destination. */
5695
5696 uint32_t fAccess;
5697 if (pImpl->pfnLockedU16)
5698 fAccess = IEM_ACCESS_DATA_RW;
5699 else /* BT */
5700 fAccess = IEM_ACCESS_DATA_R;
5701
5702 /** @todo test negative bit offsets! */
5703 switch (pVCpu->iem.s.enmEffOpSize)
5704 {
5705 case IEMMODE_16BIT:
5706 IEM_MC_BEGIN(3, 2);
5707 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5708 IEM_MC_ARG(uint16_t, u16Src, 1);
5709 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5711 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5712
5713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5714 if (pImpl->pfnLockedU16)
5715 IEMOP_HLP_DONE_DECODING();
5716 else
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5719 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5720 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5721 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5722 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5723 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5724 IEM_MC_FETCH_EFLAGS(EFlags);
5725
5726 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5727 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5728 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5729 else
5730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5731 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5732
5733 IEM_MC_COMMIT_EFLAGS(EFlags);
5734 IEM_MC_ADVANCE_RIP();
5735 IEM_MC_END();
5736 return VINF_SUCCESS;
5737
5738 case IEMMODE_32BIT:
5739 IEM_MC_BEGIN(3, 2);
5740 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5741 IEM_MC_ARG(uint32_t, u32Src, 1);
5742 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5744 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5745
5746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5747 if (pImpl->pfnLockedU16)
5748 IEMOP_HLP_DONE_DECODING();
5749 else
5750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5751 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5752 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5753 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5754 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5755 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5756 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5757 IEM_MC_FETCH_EFLAGS(EFlags);
5758
5759 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5760 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5761 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5762 else
5763 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5764 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5765
5766 IEM_MC_COMMIT_EFLAGS(EFlags);
5767 IEM_MC_ADVANCE_RIP();
5768 IEM_MC_END();
5769 return VINF_SUCCESS;
5770
5771 case IEMMODE_64BIT:
5772 IEM_MC_BEGIN(3, 2);
5773 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5774 IEM_MC_ARG(uint64_t, u64Src, 1);
5775 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5777 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5778
5779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5780 if (pImpl->pfnLockedU16)
5781 IEMOP_HLP_DONE_DECODING();
5782 else
5783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5784 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5785 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5786 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5787 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5788 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5789 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5790 IEM_MC_FETCH_EFLAGS(EFlags);
5791
5792 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5793 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5794 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5795 else
5796 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5797 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5798
5799 IEM_MC_COMMIT_EFLAGS(EFlags);
5800 IEM_MC_ADVANCE_RIP();
5801 IEM_MC_END();
5802 return VINF_SUCCESS;
5803
5804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5805 }
5806 }
5807}
5808
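/*
 * The address adjustment above turns the (possibly negative) bit offset in
 * the source register into a byte displacement plus a bit index inside the
 * addressed operand.  Standalone sketch of the 16-bit variant (SAR 4, SHL 1,
 * AND 0x0f), assuming an arithmetic right shift; illustration only:
 */
#if 0
static void iemExampleBtAdjust16(int16_t i16BitNo, int16_t *pi16ByteAdj, uint16_t *pu16BitNo)
{
    *pi16ByteAdj = (int16_t)((i16BitNo >> 4) << 1);  /* signed word index scaled to bytes */
    *pu16BitNo   = (uint16_t)i16BitNo & 0x0f;        /* bit number within that word */
}
#endif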
5809
5810/** Opcode 0x0f 0xa3. */
5811FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5812{
5813 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5814 IEMOP_HLP_MIN_386();
5815 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5816}
5817
5818
5819/**
5820 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5821 */
5822FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5823{
5824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5825 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5826
5827 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5828 {
5829 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5831
5832 switch (pVCpu->iem.s.enmEffOpSize)
5833 {
5834 case IEMMODE_16BIT:
5835 IEM_MC_BEGIN(4, 0);
5836 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5837 IEM_MC_ARG(uint16_t, u16Src, 1);
5838 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5839 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5840
5841 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5842 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5843 IEM_MC_REF_EFLAGS(pEFlags);
5844 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5845
5846 IEM_MC_ADVANCE_RIP();
5847 IEM_MC_END();
5848 return VINF_SUCCESS;
5849
5850 case IEMMODE_32BIT:
5851 IEM_MC_BEGIN(4, 0);
5852 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5853 IEM_MC_ARG(uint32_t, u32Src, 1);
5854 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5855 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5856
5857 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5858 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5859 IEM_MC_REF_EFLAGS(pEFlags);
5860 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5861
5862 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5863 IEM_MC_ADVANCE_RIP();
5864 IEM_MC_END();
5865 return VINF_SUCCESS;
5866
5867 case IEMMODE_64BIT:
5868 IEM_MC_BEGIN(4, 0);
5869 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5870 IEM_MC_ARG(uint64_t, u64Src, 1);
5871 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5872 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5873
5874 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5875 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5876 IEM_MC_REF_EFLAGS(pEFlags);
5877 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5878
5879 IEM_MC_ADVANCE_RIP();
5880 IEM_MC_END();
5881 return VINF_SUCCESS;
5882
5883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5884 }
5885 }
5886 else
5887 {
5888 switch (pVCpu->iem.s.enmEffOpSize)
5889 {
5890 case IEMMODE_16BIT:
5891 IEM_MC_BEGIN(4, 2);
5892 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5893 IEM_MC_ARG(uint16_t, u16Src, 1);
5894 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5895 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5897
5898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5899 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5900 IEM_MC_ASSIGN(cShiftArg, cShift);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5903 IEM_MC_FETCH_EFLAGS(EFlags);
5904 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5905 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5906
5907 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5908 IEM_MC_COMMIT_EFLAGS(EFlags);
5909 IEM_MC_ADVANCE_RIP();
5910 IEM_MC_END();
5911 return VINF_SUCCESS;
5912
5913 case IEMMODE_32BIT:
5914 IEM_MC_BEGIN(4, 2);
5915 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5916 IEM_MC_ARG(uint32_t, u32Src, 1);
5917 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5918 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5920
5921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5922 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5923 IEM_MC_ASSIGN(cShiftArg, cShift);
5924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5925 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5926 IEM_MC_FETCH_EFLAGS(EFlags);
5927 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5928 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5929
5930 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5931 IEM_MC_COMMIT_EFLAGS(EFlags);
5932 IEM_MC_ADVANCE_RIP();
5933 IEM_MC_END();
5934 return VINF_SUCCESS;
5935
5936 case IEMMODE_64BIT:
5937 IEM_MC_BEGIN(4, 2);
5938 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5939 IEM_MC_ARG(uint64_t, u64Src, 1);
5940 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5941 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5943
5944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5945 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5946 IEM_MC_ASSIGN(cShiftArg, cShift);
5947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5948 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5949 IEM_MC_FETCH_EFLAGS(EFlags);
5950 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5951 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5952
5953 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5954 IEM_MC_COMMIT_EFLAGS(EFlags);
5955 IEM_MC_ADVANCE_RIP();
5956 IEM_MC_END();
5957 return VINF_SUCCESS;
5958
5959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5960 }
5961 }
5962}
5963
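/*
 * For reference, the 32-bit double precision left shift these workers feed
 * into pfnNormalU32 for SHLD amounts to the following, with the assembly
 * helper also masking the count and updating EFLAGS.  Sketch only:
 */
#if 0
static uint32_t iemExampleShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                /* count is taken modulo 32 for 32-bit operands */
    if (!cShift)
        return uDst;             /* zero count: destination (and flags) unchanged */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif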
5964
5965/**
5966 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5967 */
5968FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5969{
5970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5971 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5972
5973 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5974 {
5975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5976
5977 switch (pVCpu->iem.s.enmEffOpSize)
5978 {
5979 case IEMMODE_16BIT:
5980 IEM_MC_BEGIN(4, 0);
5981 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5982 IEM_MC_ARG(uint16_t, u16Src, 1);
5983 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5984 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5985
5986 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5987 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5988 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5989 IEM_MC_REF_EFLAGS(pEFlags);
5990 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5991
5992 IEM_MC_ADVANCE_RIP();
5993 IEM_MC_END();
5994 return VINF_SUCCESS;
5995
5996 case IEMMODE_32BIT:
5997 IEM_MC_BEGIN(4, 0);
5998 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5999 IEM_MC_ARG(uint32_t, u32Src, 1);
6000 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6001 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6002
6003 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6004 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6005 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6006 IEM_MC_REF_EFLAGS(pEFlags);
6007 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6008
6009 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6010 IEM_MC_ADVANCE_RIP();
6011 IEM_MC_END();
6012 return VINF_SUCCESS;
6013
6014 case IEMMODE_64BIT:
6015 IEM_MC_BEGIN(4, 0);
6016 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6017 IEM_MC_ARG(uint64_t, u64Src, 1);
6018 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6019 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6020
6021 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6022 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6023 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6024 IEM_MC_REF_EFLAGS(pEFlags);
6025 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6026
6027 IEM_MC_ADVANCE_RIP();
6028 IEM_MC_END();
6029 return VINF_SUCCESS;
6030
6031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6032 }
6033 }
6034 else
6035 {
6036 switch (pVCpu->iem.s.enmEffOpSize)
6037 {
6038 case IEMMODE_16BIT:
6039 IEM_MC_BEGIN(4, 2);
6040 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6041 IEM_MC_ARG(uint16_t, u16Src, 1);
6042 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6043 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6045
6046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6048 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6049 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6050 IEM_MC_FETCH_EFLAGS(EFlags);
6051 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6052 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6053
6054 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6055 IEM_MC_COMMIT_EFLAGS(EFlags);
6056 IEM_MC_ADVANCE_RIP();
6057 IEM_MC_END();
6058 return VINF_SUCCESS;
6059
6060 case IEMMODE_32BIT:
6061 IEM_MC_BEGIN(4, 2);
6062 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6063 IEM_MC_ARG(uint32_t, u32Src, 1);
6064 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6065 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6067
6068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6070 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6071 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6072 IEM_MC_FETCH_EFLAGS(EFlags);
6073 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6074 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6075
6076 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6077 IEM_MC_COMMIT_EFLAGS(EFlags);
6078 IEM_MC_ADVANCE_RIP();
6079 IEM_MC_END();
6080 return VINF_SUCCESS;
6081
6082 case IEMMODE_64BIT:
6083 IEM_MC_BEGIN(4, 2);
6084 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6085 IEM_MC_ARG(uint64_t, u64Src, 1);
6086 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6087 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6089
6090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6092 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6093 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6094 IEM_MC_FETCH_EFLAGS(EFlags);
6095 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6096 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6097
6098 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6099 IEM_MC_COMMIT_EFLAGS(EFlags);
6100 IEM_MC_ADVANCE_RIP();
6101 IEM_MC_END();
6102 return VINF_SUCCESS;
6103
6104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6105 }
6106 }
6107}
6108
6109
6110
6111/** Opcode 0x0f 0xa4. */
6112FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6113{
6114 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6115 IEMOP_HLP_MIN_386();
6116 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6117}
6118
6119
6120/** Opcode 0x0f 0xa5. */
6121FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6122{
6123 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6124 IEMOP_HLP_MIN_386();
6125 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6126}
6127
6128
6129/** Opcode 0x0f 0xa8. */
6130FNIEMOP_DEF(iemOp_push_gs)
6131{
6132 IEMOP_MNEMONIC(push_gs, "push gs");
6133 IEMOP_HLP_MIN_386();
6134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6135 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6136}
6137
6138
6139/** Opcode 0x0f 0xa9. */
6140FNIEMOP_DEF(iemOp_pop_gs)
6141{
6142 IEMOP_MNEMONIC(pop_gs, "pop gs");
6143 IEMOP_HLP_MIN_386();
6144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6145 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6146}
6147
6148
6149/** Opcode 0x0f 0xaa. */
6150FNIEMOP_DEF(iemOp_rsm)
6151{
6152 IEMOP_MNEMONIC(rsm, "rsm");
6153 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6154 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6155 * intercept). */
6156 IEMOP_BITCH_ABOUT_STUB();
6157 return IEMOP_RAISE_INVALID_OPCODE();
6158}
6159
6160
6161
6162
6163/** Opcode 0x0f 0xab. */
6164FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6165{
6166 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6167 IEMOP_HLP_MIN_386();
6168 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6169}
6170
6171
6172/** Opcode 0x0f 0xac. */
6173FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6174{
6175 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6176 IEMOP_HLP_MIN_386();
6177 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6178}
6179
6180
6181/** Opcode 0x0f 0xad. */
6182FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6183{
6184 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6185 IEMOP_HLP_MIN_386();
6186 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6187}
6188
6189
6190/** Opcode 0x0f 0xae mem/0. */
6191FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6192{
6193 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6194 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6195 return IEMOP_RAISE_INVALID_OPCODE();
6196
6197 IEM_MC_BEGIN(3, 1);
6198 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6199 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6200 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6203 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6204 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6205 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6206 IEM_MC_END();
6207 return VINF_SUCCESS;
6208}
6209
6210
6211/** Opcode 0x0f 0xae mem/1. */
6212FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6213{
6214 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6215 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6216 return IEMOP_RAISE_INVALID_OPCODE();
6217
6218 IEM_MC_BEGIN(3, 1);
6219 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6220 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6221 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6224 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6225 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6226 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6227 IEM_MC_END();
6228 return VINF_SUCCESS;
6229}
6230
6231
6232/**
6233 * @opmaps grp15
6234 * @opcode !11/2
6235 * @oppfx none
6236 * @opcpuid sse
6237 * @opgroup og_sse_mxcsrsm
6238 * @opxcpttype 5
6239 * @optest op1=0 -> mxcsr=0
6240 * @optest op1=0x2083 -> mxcsr=0x2083
6241 * @optest op1=0xfffffffe -> value.xcpt=0xd
6242 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6243 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6244 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6245 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6246 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6247 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6248 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6249 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6250 */
6251FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6252{
6253 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6254 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6255 return IEMOP_RAISE_INVALID_OPCODE();
6256
6257 IEM_MC_BEGIN(2, 0);
6258 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6259 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6262 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6263 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6264 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6265 IEM_MC_END();
6266 return VINF_SUCCESS;
6267}
6268
6269
6270/**
6271 * @opmaps grp15
6272 * @opcode !11/3
6273 * @oppfx none
6274 * @opcpuid sse
6275 * @opgroup og_sse_mxcsrsm
6276 * @opxcpttype 5
6277 * @optest mxcsr=0 -> op1=0
6278 * @optest mxcsr=0x2083 -> op1=0x2083
6279 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6280 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6281 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6282 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6283 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6284 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6285 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6286 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6287 */
6288FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6289{
6290 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6291 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6292 return IEMOP_RAISE_INVALID_OPCODE();
6293
6294 IEM_MC_BEGIN(2, 0);
6295 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6296 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6299 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6300 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6301 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6302 IEM_MC_END();
6303 return VINF_SUCCESS;
6304}
6305
6306
6307/**
6308 * @opmaps grp15
6309 * @opcode !11/4
6310 * @oppfx none
6311 * @opcpuid xsave
6312 * @opgroup og_system
6313 * @opxcpttype none
6314 */
6315FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6316{
6317 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6318 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6319 return IEMOP_RAISE_INVALID_OPCODE();
6320
6321 IEM_MC_BEGIN(3, 0);
6322 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6323 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6324 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6327 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6328 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6329 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6330 IEM_MC_END();
6331 return VINF_SUCCESS;
6332}
6333
6334
6335/**
6336 * @opmaps grp15
6337 * @opcode !11/5
6338 * @oppfx none
6339 * @opcpuid xsave
6340 * @opgroup og_system
6341 * @opxcpttype none
6342 */
6343FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6344{
6345 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6346 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6347 return IEMOP_RAISE_INVALID_OPCODE();
6348
6349 IEM_MC_BEGIN(3, 0);
6350 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6351 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6352 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6355 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6356 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6357 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6358 IEM_MC_END();
6359 return VINF_SUCCESS;
6360}
6361
6362/** Opcode 0x0f 0xae mem/6. */
6363FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6364
6365/**
6366 * @opmaps grp15
6367 * @opcode !11/7
6368 * @oppfx none
6369 * @opcpuid clfsh
6370 * @opgroup og_cachectl
6371 * @optest op1=1 ->
6372 */
6373FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6374{
6375 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6376 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6377 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6378
6379 IEM_MC_BEGIN(2, 0);
6380 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6381 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6385 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6386 IEM_MC_END();
6387 return VINF_SUCCESS;
6388}
6389
6390/**
6391 * @opmaps grp15
6392 * @opcode !11/7
6393 * @oppfx 0x66
6394 * @opcpuid clflushopt
6395 * @opgroup og_cachectl
6396 * @optest op1=1 ->
6397 */
6398FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6399{
6400 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6401 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6402 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6403
6404 IEM_MC_BEGIN(2, 0);
6405 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6406 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6409 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6410 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6411 IEM_MC_END();
6412 return VINF_SUCCESS;
6413}
6414
6415
6416/** Opcode 0x0f 0xae 11b/5. */
6417FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6418{
6419 RT_NOREF_PV(bRm);
6420 IEMOP_MNEMONIC(lfence, "lfence");
6421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6422 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6423 return IEMOP_RAISE_INVALID_OPCODE();
6424
6425 IEM_MC_BEGIN(0, 0);
6426 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6427 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6428 else
6429 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6430 IEM_MC_ADVANCE_RIP();
6431 IEM_MC_END();
6432 return VINF_SUCCESS;
6433}
6434
6435
6436/** Opcode 0x0f 0xae 11b/6. */
6437FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6438{
6439 RT_NOREF_PV(bRm);
6440 IEMOP_MNEMONIC(mfence, "mfence");
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6442 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6443 return IEMOP_RAISE_INVALID_OPCODE();
6444
6445 IEM_MC_BEGIN(0, 0);
6446 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6447 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6448 else
6449 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6450 IEM_MC_ADVANCE_RIP();
6451 IEM_MC_END();
6452 return VINF_SUCCESS;
6453}
6454
6455
6456/** Opcode 0x0f 0xae 11b/7. */
6457FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6458{
6459 RT_NOREF_PV(bRm);
6460 IEMOP_MNEMONIC(sfence, "sfence");
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6463 return IEMOP_RAISE_INVALID_OPCODE();
6464
6465 IEM_MC_BEGIN(0, 0);
6466 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6467 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6468 else
6469 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6470 IEM_MC_ADVANCE_RIP();
6471 IEM_MC_END();
6472 return VINF_SUCCESS;
6473}
6474
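/*
 * On hosts without SSE2 the three fence workers above fall back to
 * iemAImpl_alt_mem_fence instead of executing l/m/sfence directly.  A
 * portable stand-in for such a fallback would be a compiler full barrier
 * (sketch, assuming a GCC/Clang style builtin):
 */
#if 0
static void iemExampleAltMemFence(void)
{
    __sync_synchronize();        /* full barrier in lieu of lfence/mfence/sfence */
}
#endif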
6475
6476/** Opcode 0xf3 0x0f 0xae 11b/0. */
6477FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6478
6479/** Opcode 0xf3 0x0f 0xae 11b/1. */
6480FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6481
6482/** Opcode 0xf3 0x0f 0xae 11b/2. */
6483FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6484
6485/** Opcode 0xf3 0x0f 0xae 11b/3. */
6486FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6487
6488
6489/**
6490 * Group 15 jump table for register variant.
6491 */
6492IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6493{ /* pfx: none, 066h, 0f3h, 0f2h */
6494 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6495 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6496 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6497 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6498 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6499 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6500 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6501 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6502};
6503AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6504
6505
6506/**
6507 * Group 15 jump table for memory variant.
6508 */
6509IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6510{ /* pfx: none, 066h, 0f3h, 0f2h */
6511 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6512 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6513 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6514 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6515 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6516 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6517 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6518 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6519};
6520AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6521
6522
6523/** Opcode 0x0f 0xae. */
6524FNIEMOP_DEF(iemOp_Grp15)
6525{
6526 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
6527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6529 /* register, register */
6530 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6531 + pVCpu->iem.s.idxPrefix], bRm);
6532 /* memory, register */
6533 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6534 + pVCpu->iem.s.idxPrefix], bRm);
6535}
6536
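/*
 * Both tables above are reg-major with four prefix columns (none, 066h,
 * 0f3h, 0f2h) per /r value, so the dispatcher computes reg * 4 + prefix.
 * Spelled out (hypothetical helper, illustration only):
 */
#if 0
static unsigned iemExampleGrp15Index(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;  /* 0..7 */
    return iReg * 4 + idxPrefix;                                               /* 0..31 */
}
#endif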
6537
6538/** Opcode 0x0f 0xaf. */
6539FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6540{
6541 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6542 IEMOP_HLP_MIN_386();
6543 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6544 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6545}
6546
6547
6548/** Opcode 0x0f 0xb0. */
6549FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6550{
6551 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6552 IEMOP_HLP_MIN_486();
6553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6554
6555 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6556 {
6557 IEMOP_HLP_DONE_DECODING();
6558 IEM_MC_BEGIN(4, 0);
6559 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6560 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6561 IEM_MC_ARG(uint8_t, u8Src, 2);
6562 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6563
6564 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6565 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6566 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6567 IEM_MC_REF_EFLAGS(pEFlags);
6568 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6569 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6570 else
6571 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6572
6573 IEM_MC_ADVANCE_RIP();
6574 IEM_MC_END();
6575 }
6576 else
6577 {
6578 IEM_MC_BEGIN(4, 3);
6579 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6580 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6581 IEM_MC_ARG(uint8_t, u8Src, 2);
6582 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6584 IEM_MC_LOCAL(uint8_t, u8Al);
6585
6586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6587 IEMOP_HLP_DONE_DECODING();
6588 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6589 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6590 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6591 IEM_MC_FETCH_EFLAGS(EFlags);
6592 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6593 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6594 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6595 else
6596 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6597
6598 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6599 IEM_MC_COMMIT_EFLAGS(EFlags);
6600 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6601 IEM_MC_ADVANCE_RIP();
6602 IEM_MC_END();
6603 }
6604 return VINF_SUCCESS;
6605}
6606
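/*
 * The compare-and-exchange performed by the assembly helpers boils down to
 * the following (ZF shown as the return value; the helpers also update the
 * arithmetic flags the way a CMP would).  Sketch only, not built:
 */
#if 0
static bool iemExampleCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src)
{
    if (*pu8Dst == *pu8Al)
    {
        *pu8Dst = u8Src;         /* match: store the source operand, ZF=1 */
        return true;
    }
    *pu8Al = *pu8Dst;            /* mismatch: load destination into AL, ZF=0 */
    return false;
}
#endif
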
6607/** Opcode 0x0f 0xb1. */
6608FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6609{
6610 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6611 IEMOP_HLP_MIN_486();
6612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6613
6614 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6615 {
6616 IEMOP_HLP_DONE_DECODING();
6617 switch (pVCpu->iem.s.enmEffOpSize)
6618 {
6619 case IEMMODE_16BIT:
6620 IEM_MC_BEGIN(4, 0);
6621 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6622 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6623 IEM_MC_ARG(uint16_t, u16Src, 2);
6624 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6625
6626 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6627 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6628 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6629 IEM_MC_REF_EFLAGS(pEFlags);
6630 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6631 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6632 else
6633 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6634
6635 IEM_MC_ADVANCE_RIP();
6636 IEM_MC_END();
6637 return VINF_SUCCESS;
6638
6639 case IEMMODE_32BIT:
6640 IEM_MC_BEGIN(4, 0);
6641 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6642 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6643 IEM_MC_ARG(uint32_t, u32Src, 2);
6644 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6645
6646 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6647 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6648 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6649 IEM_MC_REF_EFLAGS(pEFlags);
6650 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6651 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6652 else
6653 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6654
6655 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6656 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6657 IEM_MC_ADVANCE_RIP();
6658 IEM_MC_END();
6659 return VINF_SUCCESS;
6660
6661 case IEMMODE_64BIT:
6662 IEM_MC_BEGIN(4, 0);
6663 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6664 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6665#ifdef RT_ARCH_X86
6666 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6667#else
6668 IEM_MC_ARG(uint64_t, u64Src, 2);
6669#endif
6670 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6671
6672 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6673 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6674 IEM_MC_REF_EFLAGS(pEFlags);
6675#ifdef RT_ARCH_X86
6676 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6678 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6679 else
6680 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6681#else
6682 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6683 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6684 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6685 else
6686 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6687#endif
6688
6689 IEM_MC_ADVANCE_RIP();
6690 IEM_MC_END();
6691 return VINF_SUCCESS;
6692
6693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6694 }
6695 }
6696 else
6697 {
6698 switch (pVCpu->iem.s.enmEffOpSize)
6699 {
6700 case IEMMODE_16BIT:
6701 IEM_MC_BEGIN(4, 3);
6702 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6703 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6704 IEM_MC_ARG(uint16_t, u16Src, 2);
6705 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6707 IEM_MC_LOCAL(uint16_t, u16Ax);
6708
6709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6710 IEMOP_HLP_DONE_DECODING();
6711 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6712 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6713 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6714 IEM_MC_FETCH_EFLAGS(EFlags);
6715 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6716 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6717 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6718 else
6719 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6720
6721 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6722 IEM_MC_COMMIT_EFLAGS(EFlags);
6723 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6724 IEM_MC_ADVANCE_RIP();
6725 IEM_MC_END();
6726 return VINF_SUCCESS;
6727
6728 case IEMMODE_32BIT:
6729 IEM_MC_BEGIN(4, 3);
6730 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6731 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6732 IEM_MC_ARG(uint32_t, u32Src, 2);
6733 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6735 IEM_MC_LOCAL(uint32_t, u32Eax);
6736
6737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6738 IEMOP_HLP_DONE_DECODING();
6739 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6740 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6741 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6742 IEM_MC_FETCH_EFLAGS(EFlags);
6743 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6744 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6745 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6746 else
6747 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6748
6749 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6750 IEM_MC_COMMIT_EFLAGS(EFlags);
6751 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6752 IEM_MC_ADVANCE_RIP();
6753 IEM_MC_END();
6754 return VINF_SUCCESS;
6755
6756 case IEMMODE_64BIT:
6757 IEM_MC_BEGIN(4, 3);
6758 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6759 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6760#ifdef RT_ARCH_X86
6761 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6762#else
6763 IEM_MC_ARG(uint64_t, u64Src, 2);
6764#endif
6765 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6767 IEM_MC_LOCAL(uint64_t, u64Rax);
6768
6769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6770 IEMOP_HLP_DONE_DECODING();
6771 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6772 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6773 IEM_MC_FETCH_EFLAGS(EFlags);
6774 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6775#ifdef RT_ARCH_X86
6776 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6777 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6778 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6779 else
6780 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6781#else
6782 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6783 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6784 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6785 else
6786 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6787#endif
6788
6789 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6790 IEM_MC_COMMIT_EFLAGS(EFlags);
6791 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6792 IEM_MC_ADVANCE_RIP();
6793 IEM_MC_END();
6794 return VINF_SUCCESS;
6795
6796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6797 }
6798 }
6799}
6800
6801
6802FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6803{
6804 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6805 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6806
6807 switch (pVCpu->iem.s.enmEffOpSize)
6808 {
6809 case IEMMODE_16BIT:
6810 IEM_MC_BEGIN(5, 1);
6811 IEM_MC_ARG(uint16_t, uSel, 0);
6812 IEM_MC_ARG(uint16_t, offSeg, 1);
6813 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6814 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6815 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6816 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6819 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6820 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6821 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6822 IEM_MC_END();
6823 return VINF_SUCCESS;
6824
6825 case IEMMODE_32BIT:
6826 IEM_MC_BEGIN(5, 1);
6827 IEM_MC_ARG(uint16_t, uSel, 0);
6828 IEM_MC_ARG(uint32_t, offSeg, 1);
6829 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6830 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6831 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6832 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6835 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6836 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6837 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6838 IEM_MC_END();
6839 return VINF_SUCCESS;
6840
6841 case IEMMODE_64BIT:
6842 IEM_MC_BEGIN(5, 1);
6843 IEM_MC_ARG(uint16_t, uSel, 0);
6844 IEM_MC_ARG(uint64_t, offSeg, 1);
6845 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6846 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6847 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6848 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6851 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6852 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6853 else
6854 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6855 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6856 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6857 IEM_MC_END();
6858 return VINF_SUCCESS;
6859
6860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6861 }
6862}
6863
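/*
 * The memory layout fetched above is the offset first with the 16-bit
 * selector immediately after it (displacement 2, 4 or 8 by operand size).
 * Sketch of the 32-bit case over a plain byte buffer (hypothetical helper):
 */
#if 0
static void iemExampleReadFarPtr32(uint8_t const *pbMem, uint32_t *pOffSeg, uint16_t *pSel)
{
    memcpy(pOffSeg, pbMem, sizeof(*pOffSeg));    /* offset at displacement 0 */
    memcpy(pSel, pbMem + 4, sizeof(*pSel));      /* selector at displacement 4 */
}
#endif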
6864
6865/** Opcode 0x0f 0xb2. */
6866FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6867{
6868 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6869 IEMOP_HLP_MIN_386();
6870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6872 return IEMOP_RAISE_INVALID_OPCODE();
6873 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6874}
6875
6876
6877/** Opcode 0x0f 0xb3. */
6878FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6879{
6880 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6881 IEMOP_HLP_MIN_386();
6882 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6883}
6884
6885
6886/** Opcode 0x0f 0xb4. */
6887FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6888{
6889 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6890 IEMOP_HLP_MIN_386();
6891 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6892 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6893 return IEMOP_RAISE_INVALID_OPCODE();
6894 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6895}
6896
6897
6898/** Opcode 0x0f 0xb5. */
6899FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6900{
6901 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6902 IEMOP_HLP_MIN_386();
6903 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6904 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6905 return IEMOP_RAISE_INVALID_OPCODE();
6906 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6907}
6908
6909
6910/** Opcode 0x0f 0xb6. */
6911FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6912{
6913 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6914 IEMOP_HLP_MIN_386();
6915
6916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6917
6918 /*
6919 * If rm is denoting a register, no more instruction bytes.
6920 */
6921 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6922 {
6923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6924 switch (pVCpu->iem.s.enmEffOpSize)
6925 {
6926 case IEMMODE_16BIT:
6927 IEM_MC_BEGIN(0, 1);
6928 IEM_MC_LOCAL(uint16_t, u16Value);
6929 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6930 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6931 IEM_MC_ADVANCE_RIP();
6932 IEM_MC_END();
6933 return VINF_SUCCESS;
6934
6935 case IEMMODE_32BIT:
6936 IEM_MC_BEGIN(0, 1);
6937 IEM_MC_LOCAL(uint32_t, u32Value);
6938 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6939 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6940 IEM_MC_ADVANCE_RIP();
6941 IEM_MC_END();
6942 return VINF_SUCCESS;
6943
6944 case IEMMODE_64BIT:
6945 IEM_MC_BEGIN(0, 1);
6946 IEM_MC_LOCAL(uint64_t, u64Value);
6947 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6948 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6949 IEM_MC_ADVANCE_RIP();
6950 IEM_MC_END();
6951 return VINF_SUCCESS;
6952
6953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6954 }
6955 }
6956 else
6957 {
6958 /*
6959 * We're loading a register from memory.
6960 */
6961 switch (pVCpu->iem.s.enmEffOpSize)
6962 {
6963 case IEMMODE_16BIT:
6964 IEM_MC_BEGIN(0, 2);
6965 IEM_MC_LOCAL(uint16_t, u16Value);
6966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6970 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6971 IEM_MC_ADVANCE_RIP();
6972 IEM_MC_END();
6973 return VINF_SUCCESS;
6974
6975 case IEMMODE_32BIT:
6976 IEM_MC_BEGIN(0, 2);
6977 IEM_MC_LOCAL(uint32_t, u32Value);
6978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6981 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6982 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6983 IEM_MC_ADVANCE_RIP();
6984 IEM_MC_END();
6985 return VINF_SUCCESS;
6986
6987 case IEMMODE_64BIT:
6988 IEM_MC_BEGIN(0, 2);
6989 IEM_MC_LOCAL(uint64_t, u64Value);
6990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6993 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6994 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6995 IEM_MC_ADVANCE_RIP();
6996 IEM_MC_END();
6997 return VINF_SUCCESS;
6998
6999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7000 }
7001 }
7002}
7003
7004
7005/** Opcode 0x0f 0xb7. */
7006FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7007{
7008 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7009 IEMOP_HLP_MIN_386();
7010
7011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7012
7013 /** @todo Not entirely sure how the operand size prefix is handled here,
7014 * assuming that it will be ignored. Would be nice to have a few
7015 * tests for this. */
7016 /*
7017 * If rm is denoting a register, no more instruction bytes.
7018 */
7019 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7020 {
7021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7022 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7023 {
7024 IEM_MC_BEGIN(0, 1);
7025 IEM_MC_LOCAL(uint32_t, u32Value);
7026 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7027 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7028 IEM_MC_ADVANCE_RIP();
7029 IEM_MC_END();
7030 }
7031 else
7032 {
7033 IEM_MC_BEGIN(0, 1);
7034 IEM_MC_LOCAL(uint64_t, u64Value);
7035 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7036 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7037 IEM_MC_ADVANCE_RIP();
7038 IEM_MC_END();
7039 }
7040 }
7041 else
7042 {
7043 /*
7044 * We're loading a register from memory.
7045 */
7046 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7047 {
7048 IEM_MC_BEGIN(0, 2);
7049 IEM_MC_LOCAL(uint32_t, u32Value);
7050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7054 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7055 IEM_MC_ADVANCE_RIP();
7056 IEM_MC_END();
7057 }
7058 else
7059 {
7060 IEM_MC_BEGIN(0, 2);
7061 IEM_MC_LOCAL(uint64_t, u64Value);
7062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7065 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7066 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7067 IEM_MC_ADVANCE_RIP();
7068 IEM_MC_END();
7069 }
7070 }
7071 return VINF_SUCCESS;
7072}
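
/* A minimal standalone sketch (illustration only, not IEM code) of the zero
   extension done above.  Note that writing a 32-bit GREG implicitly clears
   bits 63:32, so the 32-bit and 64-bit forms yield the same 64-bit result
   for the same source word; the helper names here are hypothetical. */
#if 0
# include <stdint.h>

static uint64_t uSketchMovzxU16ToU64(uint16_t uSrc)
{
    return uSrc;                    /* Bits 63:16 end up zero. */
}

static uint64_t uSketchMovzxU16ToU32(uint16_t uSrc)
{
    return (uint32_t)uSrc;          /* 32-bit write; bits 63:32 are cleared as well. */
}
#endif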
7073
7074
7075/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7076FNIEMOP_UD_STUB(iemOp_jmpe);
7077/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7078FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7079
7080
7081/**
7082 * @opcode 0xb9
7083 * @opinvalid intel-modrm
7084 * @optest ->
7085 */
7086FNIEMOP_DEF(iemOp_Grp10)
7087{
7088 /*
7089     * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the ModR/M
7090     * byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7091 */
7092 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7093 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
7094 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7095}
7096
7097
7098/** Opcode 0x0f 0xba. */
7099FNIEMOP_DEF(iemOp_Grp8)
7100{
7101 IEMOP_HLP_MIN_386();
7102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7103 PCIEMOPBINSIZES pImpl;
7104 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7105 {
7106 case 0: case 1: case 2: case 3:
7107 /* Both AMD and Intel want full modr/m decoding and imm8. */
7108 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7109 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7110 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7111 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7112 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7113 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7114 }
7115 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7116
7117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7118 {
7119 /* register destination. */
7120 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7122
7123 switch (pVCpu->iem.s.enmEffOpSize)
7124 {
7125 case IEMMODE_16BIT:
7126 IEM_MC_BEGIN(3, 0);
7127 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7128 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7129 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7130
7131 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7132 IEM_MC_REF_EFLAGS(pEFlags);
7133 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7134
7135 IEM_MC_ADVANCE_RIP();
7136 IEM_MC_END();
7137 return VINF_SUCCESS;
7138
7139 case IEMMODE_32BIT:
7140 IEM_MC_BEGIN(3, 0);
7141 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7142 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7143 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7144
7145 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7146 IEM_MC_REF_EFLAGS(pEFlags);
7147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7148
7149 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7150 IEM_MC_ADVANCE_RIP();
7151 IEM_MC_END();
7152 return VINF_SUCCESS;
7153
7154 case IEMMODE_64BIT:
7155 IEM_MC_BEGIN(3, 0);
7156 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7157 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7159
7160 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7161 IEM_MC_REF_EFLAGS(pEFlags);
7162 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7163
7164 IEM_MC_ADVANCE_RIP();
7165 IEM_MC_END();
7166 return VINF_SUCCESS;
7167
7168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7169 }
7170 }
7171 else
7172 {
7173 /* memory destination. */
7174
7175 uint32_t fAccess;
7176 if (pImpl->pfnLockedU16)
7177 fAccess = IEM_ACCESS_DATA_RW;
7178 else /* BT */
7179 fAccess = IEM_ACCESS_DATA_R;
7180
7181 /** @todo test negative bit offsets! */
7182 switch (pVCpu->iem.s.enmEffOpSize)
7183 {
7184 case IEMMODE_16BIT:
7185 IEM_MC_BEGIN(3, 1);
7186 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7187 IEM_MC_ARG(uint16_t, u16Src, 1);
7188 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7190
7191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7192 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7193 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7194 if (pImpl->pfnLockedU16)
7195 IEMOP_HLP_DONE_DECODING();
7196 else
7197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7198 IEM_MC_FETCH_EFLAGS(EFlags);
7199 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7200 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7202 else
7203 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7204 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7205
7206 IEM_MC_COMMIT_EFLAGS(EFlags);
7207 IEM_MC_ADVANCE_RIP();
7208 IEM_MC_END();
7209 return VINF_SUCCESS;
7210
7211 case IEMMODE_32BIT:
7212 IEM_MC_BEGIN(3, 1);
7213 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7214 IEM_MC_ARG(uint32_t, u32Src, 1);
7215 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7217
7218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7219 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7220 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7221 if (pImpl->pfnLockedU16)
7222 IEMOP_HLP_DONE_DECODING();
7223 else
7224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7225 IEM_MC_FETCH_EFLAGS(EFlags);
7226 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7227 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7228 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7229 else
7230 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7231 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7232
7233 IEM_MC_COMMIT_EFLAGS(EFlags);
7234 IEM_MC_ADVANCE_RIP();
7235 IEM_MC_END();
7236 return VINF_SUCCESS;
7237
7238 case IEMMODE_64BIT:
7239 IEM_MC_BEGIN(3, 1);
7240 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7241 IEM_MC_ARG(uint64_t, u64Src, 1);
7242 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7244
7245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7246 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7247 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7248 if (pImpl->pfnLockedU16)
7249 IEMOP_HLP_DONE_DECODING();
7250 else
7251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7252 IEM_MC_FETCH_EFLAGS(EFlags);
7253 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7254 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7255 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7256 else
7257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7258 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7259
7260 IEM_MC_COMMIT_EFLAGS(EFlags);
7261 IEM_MC_ADVANCE_RIP();
7262 IEM_MC_END();
7263 return VINF_SUCCESS;
7264
7265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7266 }
7267 }
7268}
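
/* Sketch of the immediate bit-offset masking used by the Ev,Ib forms above
   (illustration only; the helper name is hypothetical and only CF is
   modelled).  Because the offset is masked to the operand width, the
   immediate forms cannot produce the negative or out-of-range offsets that
   the Ev,Gv forms can. */
#if 0
# include <stdbool.h>
# include <stdint.h>

static bool fSketchBtEvIb(uint64_t uDst, uint8_t u8Bit, unsigned cOpBits /* 16/32/64 */)
{
    unsigned const iBit = u8Bit & (cOpBits - 1);    /* The 0x0f/0x1f/0x3f masking above. */
    return (uDst >> iBit) & 1;                      /* CF = the selected bit. */
}
#endif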
7269
7270
7271/** Opcode 0x0f 0xbb. */
7272FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7273{
7274 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7275 IEMOP_HLP_MIN_386();
7276 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7277}
7278
7279
7280/** Opcode 0x0f 0xbc. */
7281FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7282{
7283 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7284 IEMOP_HLP_MIN_386();
7285 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7286 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7287}
7288
7289
7290/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7291FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7292
7293
7294/** Opcode 0x0f 0xbd. */
7295FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7296{
7297 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7298 IEMOP_HLP_MIN_386();
7299 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7300 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7301}
7302
7303
7304/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7305FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7306
7307
7308/** Opcode 0x0f 0xbe. */
7309FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7310{
7311 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7312 IEMOP_HLP_MIN_386();
7313
7314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7315
7316 /*
7317 * If rm is denoting a register, no more instruction bytes.
7318 */
7319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7320 {
7321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7322 switch (pVCpu->iem.s.enmEffOpSize)
7323 {
7324 case IEMMODE_16BIT:
7325 IEM_MC_BEGIN(0, 1);
7326 IEM_MC_LOCAL(uint16_t, u16Value);
7327 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7328 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7329 IEM_MC_ADVANCE_RIP();
7330 IEM_MC_END();
7331 return VINF_SUCCESS;
7332
7333 case IEMMODE_32BIT:
7334 IEM_MC_BEGIN(0, 1);
7335 IEM_MC_LOCAL(uint32_t, u32Value);
7336 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7337 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7338 IEM_MC_ADVANCE_RIP();
7339 IEM_MC_END();
7340 return VINF_SUCCESS;
7341
7342 case IEMMODE_64BIT:
7343 IEM_MC_BEGIN(0, 1);
7344 IEM_MC_LOCAL(uint64_t, u64Value);
7345 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7346 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7347 IEM_MC_ADVANCE_RIP();
7348 IEM_MC_END();
7349 return VINF_SUCCESS;
7350
7351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7352 }
7353 }
7354 else
7355 {
7356 /*
7357 * We're loading a register from memory.
7358 */
7359 switch (pVCpu->iem.s.enmEffOpSize)
7360 {
7361 case IEMMODE_16BIT:
7362 IEM_MC_BEGIN(0, 2);
7363 IEM_MC_LOCAL(uint16_t, u16Value);
7364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7367 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7368 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7369 IEM_MC_ADVANCE_RIP();
7370 IEM_MC_END();
7371 return VINF_SUCCESS;
7372
7373 case IEMMODE_32BIT:
7374 IEM_MC_BEGIN(0, 2);
7375 IEM_MC_LOCAL(uint32_t, u32Value);
7376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7379 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7380 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7381 IEM_MC_ADVANCE_RIP();
7382 IEM_MC_END();
7383 return VINF_SUCCESS;
7384
7385 case IEMMODE_64BIT:
7386 IEM_MC_BEGIN(0, 2);
7387 IEM_MC_LOCAL(uint64_t, u64Value);
7388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7391 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7392 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7393 IEM_MC_ADVANCE_RIP();
7394 IEM_MC_END();
7395 return VINF_SUCCESS;
7396
7397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7398 }
7399 }
7400}
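
/* Sign extension counterpart to the movzx sketch further up (illustration
   only, hypothetical helper; shown for the 8 -> 64 case). */
#if 0
# include <stdint.h>

static uint64_t uSketchMovsxU8ToU64(uint8_t uSrc)
{
    return (uint64_t)(int64_t)(int8_t)uSrc;         /* Bit 7 is replicated into bits 63:8. */
}
#endif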
7401
7402
7403/** Opcode 0x0f 0xbf. */
7404FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7405{
7406 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7407 IEMOP_HLP_MIN_386();
7408
7409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7410
7411 /** @todo Not entirely sure how the operand size prefix is handled here,
7412 * assuming that it will be ignored. Would be nice to have a few
7413     *        tests for this. */
7414 /*
7415 * If rm is denoting a register, no more instruction bytes.
7416 */
7417 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7418 {
7419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7420 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7421 {
7422 IEM_MC_BEGIN(0, 1);
7423 IEM_MC_LOCAL(uint32_t, u32Value);
7424 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7425 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7426 IEM_MC_ADVANCE_RIP();
7427 IEM_MC_END();
7428 }
7429 else
7430 {
7431 IEM_MC_BEGIN(0, 1);
7432 IEM_MC_LOCAL(uint64_t, u64Value);
7433 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7434 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7435 IEM_MC_ADVANCE_RIP();
7436 IEM_MC_END();
7437 }
7438 }
7439 else
7440 {
7441 /*
7442 * We're loading a register from memory.
7443 */
7444 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7445 {
7446 IEM_MC_BEGIN(0, 2);
7447 IEM_MC_LOCAL(uint32_t, u32Value);
7448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7451 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7452 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7453 IEM_MC_ADVANCE_RIP();
7454 IEM_MC_END();
7455 }
7456 else
7457 {
7458 IEM_MC_BEGIN(0, 2);
7459 IEM_MC_LOCAL(uint64_t, u64Value);
7460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7463 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7464 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7465 IEM_MC_ADVANCE_RIP();
7466 IEM_MC_END();
7467 }
7468 }
7469 return VINF_SUCCESS;
7470}
7471
7472
7473/** Opcode 0x0f 0xc0. */
7474FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7475{
7476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7477 IEMOP_HLP_MIN_486();
7478 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7479
7480 /*
7481 * If rm is denoting a register, no more instruction bytes.
7482 */
7483 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7484 {
7485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7486
7487 IEM_MC_BEGIN(3, 0);
7488 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7489 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7490 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7491
7492 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7493 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7494 IEM_MC_REF_EFLAGS(pEFlags);
7495 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7496
7497 IEM_MC_ADVANCE_RIP();
7498 IEM_MC_END();
7499 }
7500 else
7501 {
7502 /*
7503 * We're accessing memory.
7504 */
7505 IEM_MC_BEGIN(3, 3);
7506 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7507 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7508 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7509 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7511
7512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7513 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7514 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7515 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7516 IEM_MC_FETCH_EFLAGS(EFlags);
7517 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7518 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7519 else
7520 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7521
7522 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7523 IEM_MC_COMMIT_EFLAGS(EFlags);
7524 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7525 IEM_MC_ADVANCE_RIP();
7526 IEM_MC_END();
7527 return VINF_SUCCESS;
7528 }
7529 return VINF_SUCCESS;
7530}
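
/* Simplified model of the XADD exchange-and-add done by the workers invoked
   above (illustration only; hypothetical helper, EFLAGS updating omitted).
   The destination receives the sum while the source register receives the
   old destination value. */
#if 0
# include <stdint.h>

static void sketchXaddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg;          /* dst += reg (the flags come from this add). */
    *puReg = uOldDst;                   /* reg  = old dst. */
}
#endif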
7531
7532
7533/** Opcode 0x0f 0xc1. */
7534FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7535{
7536 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7537 IEMOP_HLP_MIN_486();
7538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7539
7540 /*
7541 * If rm is denoting a register, no more instruction bytes.
7542 */
7543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7544 {
7545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7546
7547 switch (pVCpu->iem.s.enmEffOpSize)
7548 {
7549 case IEMMODE_16BIT:
7550 IEM_MC_BEGIN(3, 0);
7551 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7552 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7553 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7554
7555 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7556 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7557 IEM_MC_REF_EFLAGS(pEFlags);
7558 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7559
7560 IEM_MC_ADVANCE_RIP();
7561 IEM_MC_END();
7562 return VINF_SUCCESS;
7563
7564 case IEMMODE_32BIT:
7565 IEM_MC_BEGIN(3, 0);
7566 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7567 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7568 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7569
7570 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7571 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7572 IEM_MC_REF_EFLAGS(pEFlags);
7573 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7574
7575 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7576 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7577 IEM_MC_ADVANCE_RIP();
7578 IEM_MC_END();
7579 return VINF_SUCCESS;
7580
7581 case IEMMODE_64BIT:
7582 IEM_MC_BEGIN(3, 0);
7583 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7584 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7585 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7586
7587 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7588 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7589 IEM_MC_REF_EFLAGS(pEFlags);
7590 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7591
7592 IEM_MC_ADVANCE_RIP();
7593 IEM_MC_END();
7594 return VINF_SUCCESS;
7595
7596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7597 }
7598 }
7599 else
7600 {
7601 /*
7602 * We're accessing memory.
7603 */
7604 switch (pVCpu->iem.s.enmEffOpSize)
7605 {
7606 case IEMMODE_16BIT:
7607 IEM_MC_BEGIN(3, 3);
7608 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7609 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7610 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7611 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7613
7614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7615 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7616 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7617 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7618 IEM_MC_FETCH_EFLAGS(EFlags);
7619 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7620 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7621 else
7622 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7623
7624 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7625 IEM_MC_COMMIT_EFLAGS(EFlags);
7626 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7627 IEM_MC_ADVANCE_RIP();
7628 IEM_MC_END();
7629 return VINF_SUCCESS;
7630
7631 case IEMMODE_32BIT:
7632 IEM_MC_BEGIN(3, 3);
7633 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7634 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7635 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7636 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7638
7639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7640 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7641 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7642 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7643 IEM_MC_FETCH_EFLAGS(EFlags);
7644 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7645 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7646 else
7647 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7648
7649 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7650 IEM_MC_COMMIT_EFLAGS(EFlags);
7651 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7652 IEM_MC_ADVANCE_RIP();
7653 IEM_MC_END();
7654 return VINF_SUCCESS;
7655
7656 case IEMMODE_64BIT:
7657 IEM_MC_BEGIN(3, 3);
7658 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7659 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7660 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7661 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7663
7664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7665 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7666 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7667 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7668 IEM_MC_FETCH_EFLAGS(EFlags);
7669 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7670 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7671 else
7672 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7673
7674 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7675 IEM_MC_COMMIT_EFLAGS(EFlags);
7676 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7677 IEM_MC_ADVANCE_RIP();
7678 IEM_MC_END();
7679 return VINF_SUCCESS;
7680
7681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7682 }
7683 }
7684}
7685
7686
7687/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7688FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7689/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7690FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7691/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7692FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7693/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7694FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7695
7696
7697/** Opcode 0x0f 0xc3. */
7698FNIEMOP_DEF(iemOp_movnti_My_Gy)
7699{
7700 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7701
7702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7703
7704 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7706 {
7707 switch (pVCpu->iem.s.enmEffOpSize)
7708 {
7709 case IEMMODE_32BIT:
7710 IEM_MC_BEGIN(0, 2);
7711 IEM_MC_LOCAL(uint32_t, u32Value);
7712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7713
7714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7717 return IEMOP_RAISE_INVALID_OPCODE();
7718
7719 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7720 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7721 IEM_MC_ADVANCE_RIP();
7722 IEM_MC_END();
7723 break;
7724
7725 case IEMMODE_64BIT:
7726 IEM_MC_BEGIN(0, 2);
7727 IEM_MC_LOCAL(uint64_t, u64Value);
7728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7729
7730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7732 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7733 return IEMOP_RAISE_INVALID_OPCODE();
7734
7735 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7736 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7737 IEM_MC_ADVANCE_RIP();
7738 IEM_MC_END();
7739 break;
7740
7741 case IEMMODE_16BIT:
7742 /** @todo check this form. */
7743 return IEMOP_RAISE_INVALID_OPCODE();
7744 }
7745 }
7746 else
7747 return IEMOP_RAISE_INVALID_OPCODE();
7748 return VINF_SUCCESS;
7749}
7750/* Opcode 0x66 0x0f 0xc3 - invalid */
7751/* Opcode 0xf3 0x0f 0xc3 - invalid */
7752/* Opcode 0xf2 0x0f 0xc3 - invalid */
7753
7754/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7755FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7756/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7757FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7758/* Opcode 0xf3 0x0f 0xc4 - invalid */
7759/* Opcode 0xf2 0x0f 0xc4 - invalid */
7760
7761/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7762FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7763/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7764FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7765/* Opcode 0xf3 0x0f 0xc5 - invalid */
7766/* Opcode 0xf2 0x0f 0xc5 - invalid */
7767
7768/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7769FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7770/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7771FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7772/* Opcode 0xf3 0x0f 0xc6 - invalid */
7773/* Opcode 0xf2 0x0f 0xc6 - invalid */
7774
7775
7776/** Opcode 0x0f 0xc7 !11/1. */
7777FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7778{
7779 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7780
7781 IEM_MC_BEGIN(4, 3);
7782 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7783 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7784 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7785 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7786 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7787 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7789
7790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7791 IEMOP_HLP_DONE_DECODING();
7792 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7793
7794 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7795 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7796 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7797
7798 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7799 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7800 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7801
7802 IEM_MC_FETCH_EFLAGS(EFlags);
7803 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7804 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7805 else
7806 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7807
7808 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7809 IEM_MC_COMMIT_EFLAGS(EFlags);
7810 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7811 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7812 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7813 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7814 IEM_MC_ENDIF();
7815 IEM_MC_ADVANCE_RIP();
7816
7817 IEM_MC_END();
7818 return VINF_SUCCESS;
7819}
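
/* Simplified model of the CMPXCHG8B compare-and-exchange performed above
   (illustration only; hypothetical helper, locking and all EFLAGS other
   than ZF omitted). */
#if 0
# include <stdbool.h>
# include <stdint.h>

static bool fSketchCmpXchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;           /* ZF=1: store ECX:EBX into memory. */
        return true;
    }
    *pu64EaxEdx = *pu64Mem;             /* ZF=0: load the memory value into EDX:EAX. */
    return false;
}
#endif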
7820
7821
7822/** Opcode REX.W 0x0f 0xc7 !11/1. */
7823FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7824{
7825 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7826 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7827 {
7828#if 0
7829 RT_NOREF(bRm);
7830 IEMOP_BITCH_ABOUT_STUB();
7831 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7832#else
7833 IEM_MC_BEGIN(4, 3);
7834 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7835 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7836 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7837 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7838 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7839 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7841
7842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7843 IEMOP_HLP_DONE_DECODING();
7844 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7845 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7846
7847 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7848 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7849 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7850
7851 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7852 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7853 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7854
7855 IEM_MC_FETCH_EFLAGS(EFlags);
7856# ifdef RT_ARCH_AMD64
7857 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7858 {
7859 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7860 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7861 else
7862 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7863 }
7864 else
7865# endif
7866 {
7867            /* Note! The fallback for 32-bit systems and systems without CX16 is to do
7868               multiple accesses which are not at all atomic; that works fine in a
7869               uni-CPU guest configuration (ignoring DMA). If guest SMP is active we
7870               have no choice but to use a rendezvous callback here. Sigh. */
7871 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7872 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7873 else
7874 {
7875 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7876 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7877 }
7878 }
7879
7880 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7881 IEM_MC_COMMIT_EFLAGS(EFlags);
7882 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7883 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7884 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7885 IEM_MC_ENDIF();
7886 IEM_MC_ADVANCE_RIP();
7887
7888 IEM_MC_END();
7889 return VINF_SUCCESS;
7890#endif
7891 }
7892 Log(("cmpxchg16b -> #UD\n"));
7893 return IEMOP_RAISE_INVALID_OPCODE();
7894}
7895
7896FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7897{
7898 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7899 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7900 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7901}
7902
7903/** Opcode 0x0f 0xc7 11/6. */
7904FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7905
7906/** Opcode 0x0f 0xc7 !11/6. */
7907FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7908
7909/** Opcode 0x66 0x0f 0xc7 !11/6. */
7910FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7911
7912/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7913FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7914
7915/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7916FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7917
7918/** Opcode 0x0f 0xc7 11/7. */
7919FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7920
7921
7922/**
7923 * Group 9 jump table for register variant.
7924 */
7925IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7926{ /* pfx: none, 066h, 0f3h, 0f2h */
7927 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7928 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7929 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7930 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7931 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7932 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7933 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7934 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7935};
7936AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7937
7938
7939/**
7940 * Group 9 jump table for memory variant.
7941 */
7942IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7943{ /* pfx: none, 066h, 0f3h, 0f2h */
7944 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7945 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7946 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7947 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7948 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7949 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7950 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7951 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7952};
7953AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7954
7955
7956/** Opcode 0x0f 0xc7. */
7957FNIEMOP_DEF(iemOp_Grp9)
7958{
7959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7961 /* register, register */
7962 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7963 + pVCpu->iem.s.idxPrefix], bRm);
7964 /* memory, register */
7965 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7966 + pVCpu->iem.s.idxPrefix], bRm);
7967}
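
/* The group 9 tables above hold four prefix columns (none, 066h, 0f3h, 0f2h)
   per /r value, so the dispatch index is reg * 4 + prefix.  A quick sketch
   of that computation (illustration only, hypothetical helper): */
#if 0
# include <stdint.h>

static unsigned idxSketchGroup9(uint8_t bRm, unsigned idxPrefix /* 0..3 */)
{
    unsigned const iReg = (bRm >> 3) & 7;   /* The ModR/M reg field. */
    return iReg * 4 + idxPrefix;            /* One row of four entries per /r. */
}
#endif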
7968
7969
7970/**
7971 * Common 'bswap register' helper.
7972 */
7973FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7974{
7975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976 switch (pVCpu->iem.s.enmEffOpSize)
7977 {
7978 case IEMMODE_16BIT:
7979 IEM_MC_BEGIN(1, 0);
7980 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7981 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7982 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7983 IEM_MC_ADVANCE_RIP();
7984 IEM_MC_END();
7985 return VINF_SUCCESS;
7986
7987 case IEMMODE_32BIT:
7988 IEM_MC_BEGIN(1, 0);
7989 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7990 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7991 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7992 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7993 IEM_MC_ADVANCE_RIP();
7994 IEM_MC_END();
7995 return VINF_SUCCESS;
7996
7997 case IEMMODE_64BIT:
7998 IEM_MC_BEGIN(1, 0);
7999 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8000 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8001 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8002 IEM_MC_ADVANCE_RIP();
8003 IEM_MC_END();
8004 return VINF_SUCCESS;
8005
8006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8007 }
8008}
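
/* Sketch of the 32/64-bit byte swapping done by the assembly workers called
   above (illustration only, hypothetical helpers).  The 16-bit form is left
   out here since its result is architecturally undefined. */
#if 0
# include <stdint.h>

static uint32_t uSketchBswapU32(uint32_t u)
{
    return (u >> 24) | ((u >> 8) & UINT32_C(0xff00)) | ((u << 8) & UINT32_C(0xff0000)) | (u << 24);
}

static uint64_t uSketchBswapU64(uint64_t u)
{
    return ((uint64_t)uSketchBswapU32((uint32_t)u) << 32) | uSketchBswapU32((uint32_t)(u >> 32));
}
#endif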
8009
8010
8011/** Opcode 0x0f 0xc8. */
8012FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8013{
8014 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8015    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8016             prefix; it appears REX.B is actually the correct prefix.  For a
8017             parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8018 IEMOP_HLP_MIN_486();
8019 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8020}
8021
8022
8023/** Opcode 0x0f 0xc9. */
8024FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8025{
8026 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8027 IEMOP_HLP_MIN_486();
8028 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8029}
8030
8031
8032/** Opcode 0x0f 0xca. */
8033FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8034{
8035    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8036 IEMOP_HLP_MIN_486();
8037 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8038}
8039
8040
8041/** Opcode 0x0f 0xcb. */
8042FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8043{
8044    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8045 IEMOP_HLP_MIN_486();
8046 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8047}
8048
8049
8050/** Opcode 0x0f 0xcc. */
8051FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8052{
8053 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8054 IEMOP_HLP_MIN_486();
8055 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8056}
8057
8058
8059/** Opcode 0x0f 0xcd. */
8060FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8061{
8062 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8063 IEMOP_HLP_MIN_486();
8064 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8065}
8066
8067
8068/** Opcode 0x0f 0xce. */
8069FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8070{
8071 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8072 IEMOP_HLP_MIN_486();
8073 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8074}
8075
8076
8077/** Opcode 0x0f 0xcf. */
8078FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8079{
8080 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8081 IEMOP_HLP_MIN_486();
8082 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8083}
8084
8085
8086/* Opcode 0x0f 0xd0 - invalid */
8087/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8088FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8089/* Opcode 0xf3 0x0f 0xd0 - invalid */
8090/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8091FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8092
8093/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8094FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8095/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8096FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8097/* Opcode 0xf3 0x0f 0xd1 - invalid */
8098/* Opcode 0xf2 0x0f 0xd1 - invalid */
8099
8100/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8101FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8102/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8103FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8104/* Opcode 0xf3 0x0f 0xd2 - invalid */
8105/* Opcode 0xf2 0x0f 0xd2 - invalid */
8106
8107/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8108FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8109/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8110FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8111/* Opcode 0xf3 0x0f 0xd3 - invalid */
8112/* Opcode 0xf2 0x0f 0xd3 - invalid */
8113
8114/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8115FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8116/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8117FNIEMOP_STUB(iemOp_paddq_Vx_W);
8118/* Opcode 0xf3 0x0f 0xd4 - invalid */
8119/* Opcode 0xf2 0x0f 0xd4 - invalid */
8120
8121/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8122FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8123/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8124FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8125/* Opcode 0xf3 0x0f 0xd5 - invalid */
8126/* Opcode 0xf2 0x0f 0xd5 - invalid */
8127
8128/* Opcode 0x0f 0xd6 - invalid */
8129
8130/**
8131 * @opcode 0xd6
8132 * @oppfx 0x66
8133 * @opcpuid sse2
8134 * @opgroup og_sse2_pcksclr_datamove
8135 * @opxcpttype none
8136 * @optest op1=-1 op2=2 -> op1=2
8137 * @optest op1=0 op2=-42 -> op1=-42
8138 */
8139FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8140{
8141 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8143 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8144 {
8145 /*
8146 * Register, register.
8147 */
8148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8149 IEM_MC_BEGIN(0, 2);
8150 IEM_MC_LOCAL(uint64_t, uSrc);
8151
8152 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8153 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8154
8155 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8156 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8157
8158 IEM_MC_ADVANCE_RIP();
8159 IEM_MC_END();
8160 }
8161 else
8162 {
8163 /*
8164 * Memory, register.
8165 */
8166 IEM_MC_BEGIN(0, 2);
8167 IEM_MC_LOCAL(uint64_t, uSrc);
8168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8169
8170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8172 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8173 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8174
8175 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8176 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8177
8178 IEM_MC_ADVANCE_RIP();
8179 IEM_MC_END();
8180 }
8181 return VINF_SUCCESS;
8182}
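
/* The register form above writes 64 bits and zeroes the remainder of the
   destination XMM register (the WqZxReg operand form).  Sketch with a
   hypothetical 128-bit pair type (illustration only): */
#if 0
# include <stdint.h>

typedef struct SKETCHU128 { uint64_t au64[2]; } SKETCHU128;

static void sketchMovqWqVqRegForm(SKETCHU128 *puDst, uint64_t uSrc)
{
    puDst->au64[0] = uSrc;      /* Low qword = source. */
    puDst->au64[1] = 0;         /* High qword zeroed, per the ZX in the operand form. */
}
#endif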
8183
8184
8185/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
8186FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
8187/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
8188FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
8189#if 0
8190FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
8191{
8192    /* Docs say register only. */
8193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8194
8195 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8196 {
8197 case IEM_OP_PRF_SIZE_OP: /* SSE */
8198            IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
8199 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8200 IEM_MC_BEGIN(2, 0);
8201 IEM_MC_ARG(uint64_t *, pDst, 0);
8202 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8203 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8204 IEM_MC_PREPARE_SSE_USAGE();
8205 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8206 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8207 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8208 IEM_MC_ADVANCE_RIP();
8209 IEM_MC_END();
8210 return VINF_SUCCESS;
8211
8212 case 0: /* MMX */
8213            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
8214 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8215 IEM_MC_BEGIN(2, 0);
8216 IEM_MC_ARG(uint64_t *, pDst, 0);
8217 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8218 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8219 IEM_MC_PREPARE_FPU_USAGE();
8220 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8221 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8222 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8223 IEM_MC_ADVANCE_RIP();
8224 IEM_MC_END();
8225 return VINF_SUCCESS;
8226
8227 default:
8228 return IEMOP_RAISE_INVALID_OPCODE();
8229 }
8230}
8231#endif
8232
8233
8234/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8235FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8236{
8237    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8238 /** @todo testcase: Check that the instruction implicitly clears the high
8239     *        bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8240 * and opcode modifications are made to work with the whole width (not
8241 * just 128). */
8242    IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8243    /* Docs say register only. */
8244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8245 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8246 {
8247 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8248 IEM_MC_BEGIN(2, 0);
8249 IEM_MC_ARG(uint64_t *, pDst, 0);
8250 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8251 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8252 IEM_MC_PREPARE_FPU_USAGE();
8253 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8254 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8255 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8256 IEM_MC_ADVANCE_RIP();
8257 IEM_MC_END();
8258 return VINF_SUCCESS;
8259 }
8260 return IEMOP_RAISE_INVALID_OPCODE();
8261}
8262
8263/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8264FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8265{
8266    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8267 /** @todo testcase: Check that the instruction implicitly clears the high
8268     *        bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
8269 * and opcode modifications are made to work with the whole width (not
8270 * just 128). */
8271    IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8272    /* Docs say register only. */
8273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8274 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8275 {
8276 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8277 IEM_MC_BEGIN(2, 0);
8278 IEM_MC_ARG(uint64_t *, pDst, 0);
8279 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8280 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8281 IEM_MC_PREPARE_SSE_USAGE();
8282 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8283 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8284 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8285 IEM_MC_ADVANCE_RIP();
8286 IEM_MC_END();
8287 return VINF_SUCCESS;
8288 }
8289 return IEMOP_RAISE_INVALID_OPCODE();
8290}
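
/* Sketch of the PMOVMSKB mask the workers above compute: one result bit per
   source byte, taken from that byte's sign bit (shown for the 64-bit MMX
   case; illustration only, hypothetical helper). */
#if 0
# include <stdint.h>

static uint32_t uSketchPMovMskBU64(uint64_t uSrc)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= (uint32_t)((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif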
8291
8292/* Opcode 0xf3 0x0f 0xd7 - invalid */
8293/* Opcode 0xf2 0x0f 0xd7 - invalid */
8294
8295
8296/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8297FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8298/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8299FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8300/* Opcode 0xf3 0x0f 0xd8 - invalid */
8301/* Opcode 0xf2 0x0f 0xd8 - invalid */
8302
8303/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8304FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8305/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8306FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8307/* Opcode 0xf3 0x0f 0xd9 - invalid */
8308/* Opcode 0xf2 0x0f 0xd9 - invalid */
8309
8310/** Opcode 0x0f 0xda - pminub Pq, Qq */
8311FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8312/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8313FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8314/* Opcode 0xf3 0x0f 0xda - invalid */
8315/* Opcode 0xf2 0x0f 0xda - invalid */
8316
8317/** Opcode 0x0f 0xdb - pand Pq, Qq */
8318FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8319/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8320FNIEMOP_STUB(iemOp_pand_Vx_W);
8321/* Opcode 0xf3 0x0f 0xdb - invalid */
8322/* Opcode 0xf2 0x0f 0xdb - invalid */
8323
8324/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8325FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8326/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8327FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8328/* Opcode 0xf3 0x0f 0xdc - invalid */
8329/* Opcode 0xf2 0x0f 0xdc - invalid */
8330
8331/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8332FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8333/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8334FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8335/* Opcode 0xf3 0x0f 0xdd - invalid */
8336/* Opcode 0xf2 0x0f 0xdd - invalid */
8337
8338/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8339FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8340/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8341FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8342/* Opcode 0xf3 0x0f 0xde - invalid */
8343/* Opcode 0xf2 0x0f 0xde - invalid */
8344
8345/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8346FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8347/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8348FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8349/* Opcode 0xf3 0x0f 0xdf - invalid */
8350/* Opcode 0xf2 0x0f 0xdf - invalid */
8351
8352/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8353FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8354/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8355FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8356/* Opcode 0xf3 0x0f 0xe0 - invalid */
8357/* Opcode 0xf2 0x0f 0xe0 - invalid */
8358
8359/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8360FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8361/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8362FNIEMOP_STUB(iemOp_psraw_Vx_W);
8363/* Opcode 0xf3 0x0f 0xe1 - invalid */
8364/* Opcode 0xf2 0x0f 0xe1 - invalid */
8365
8366/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8367FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8368/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8369FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8370/* Opcode 0xf3 0x0f 0xe2 - invalid */
8371/* Opcode 0xf2 0x0f 0xe2 - invalid */
8372
8373/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8374FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8375/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8376FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8377/* Opcode 0xf3 0x0f 0xe3 - invalid */
8378/* Opcode 0xf2 0x0f 0xe3 - invalid */
8379
8380/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8381FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8382/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8383FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8384/* Opcode 0xf3 0x0f 0xe4 - invalid */
8385/* Opcode 0xf2 0x0f 0xe4 - invalid */
8386
8387/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8388FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8389/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8390FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8391/* Opcode 0xf3 0x0f 0xe5 - invalid */
8392/* Opcode 0xf2 0x0f 0xe5 - invalid */
8393
8394/* Opcode 0x0f 0xe6 - invalid */
8395/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8396FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8397/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8398FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8399/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8400FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8401
8402
8403/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8404FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8405{
8406 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8408 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8409 {
8410 /* Register, memory. */
8411 IEM_MC_BEGIN(0, 2);
8412 IEM_MC_LOCAL(uint64_t, uSrc);
8413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8414
8415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8417 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8418 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8419
8420 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8421 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8422
8423 IEM_MC_ADVANCE_RIP();
8424 IEM_MC_END();
8425 return VINF_SUCCESS;
8426 }
8427 /* The register, register encoding is invalid. */
8428 return IEMOP_RAISE_INVALID_OPCODE();
8429}
8430
8431/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8432FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8433{
8434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8435 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8436 {
8437 /* Register, memory. */
8438 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8439 IEM_MC_BEGIN(0, 2);
8440 IEM_MC_LOCAL(RTUINT128U, uSrc);
8441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8442
8443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8445 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8446 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8447
8448 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8449 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8450
8451 IEM_MC_ADVANCE_RIP();
8452 IEM_MC_END();
8453 return VINF_SUCCESS;
8454 }
8455
8456 /* The register, register encoding is invalid. */
8457 return IEMOP_RAISE_INVALID_OPCODE();
8458}
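
/* The _ALIGN_SSE store above is expected to fault unless the effective
   address is 16 byte aligned, as movntdq requires.  A trivial sketch of
   that alignment test (illustration only, hypothetical helper): */
#if 0
# include <stdbool.h>
# include <stdint.h>

static bool fSketchIsMovntdqAligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0;    /* 16-byte aligned? Otherwise #GP(0). */
}
#endif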
8459
8460/* Opcode 0xf3 0x0f 0xe7 - invalid */
8461/* Opcode 0xf2 0x0f 0xe7 - invalid */
8462
8463
8464/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8465FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8466/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8467FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8468/* Opcode 0xf3 0x0f 0xe8 - invalid */
8469/* Opcode 0xf2 0x0f 0xe8 - invalid */
8470
8471/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8472FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8473/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8474FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8475/* Opcode 0xf3 0x0f 0xe9 - invalid */
8476/* Opcode 0xf2 0x0f 0xe9 - invalid */
8477
8478/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8479FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8480/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8481FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8482/* Opcode 0xf3 0x0f 0xea - invalid */
8483/* Opcode 0xf2 0x0f 0xea - invalid */
8484
8485/** Opcode 0x0f 0xeb - por Pq, Qq */
8486FNIEMOP_STUB(iemOp_por_Pq_Qq);
8487/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8488FNIEMOP_STUB(iemOp_por_Vx_W);
8489/* Opcode 0xf3 0x0f 0xeb - invalid */
8490/* Opcode 0xf2 0x0f 0xeb - invalid */
8491
8492/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8493FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8494/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8495FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8496/* Opcode 0xf3 0x0f 0xec - invalid */
8497/* Opcode 0xf2 0x0f 0xec - invalid */
8498
8499/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8500FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8501/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8502FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8503/* Opcode 0xf3 0x0f 0xed - invalid */
8504/* Opcode 0xf2 0x0f 0xed - invalid */
8505
8506/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8507FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8508/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8509FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8510/* Opcode 0xf3 0x0f 0xee - invalid */
8511/* Opcode 0xf2 0x0f 0xee - invalid */
8512
8513
8514/** Opcode 0x0f 0xef - pxor Pq, Qq */
8515FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8516{
8517 IEMOP_MNEMONIC(pxor, "pxor");
8518 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8519}
8520
8521/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8522FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8523{
8524 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8525 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8526}
8527
8528/* Opcode 0xf3 0x0f 0xef - invalid */
8529/* Opcode 0xf2 0x0f 0xef - invalid */
8530
8531/* Opcode 0x0f 0xf0 - invalid */
8532/* Opcode 0x66 0x0f 0xf0 - invalid */
8533/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8534FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8535
8536/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8537FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8538/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8539FNIEMOP_STUB(iemOp_psllw_Vx_W);
8540/* Opcode 0xf2 0x0f 0xf1 - invalid */
8541
8542/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8543FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8544/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8545FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8546/* Opcode 0xf2 0x0f 0xf2 - invalid */
8547
8548/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8549FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8550/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8551FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8552/* Opcode 0xf2 0x0f 0xf3 - invalid */
8553
8554/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8555FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8556/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8557FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8558/* Opcode 0xf2 0x0f 0xf4 - invalid */
8559
8560/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8561FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8562/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8563FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8564/* Opcode 0xf2 0x0f 0xf5 - invalid */
8565
8566/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8567FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8568/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8569FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8570/* Opcode 0xf2 0x0f 0xf6 - invalid */
8571
8572/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8573FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8574/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8575FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8576/* Opcode 0xf2 0x0f 0xf7 - invalid */
8577
8578/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8579FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8580/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8581FNIEMOP_STUB(iemOp_psubb_Vx_W);
8582/* Opcode 0xf2 0x0f 0xf8 - invalid */
8583
8584/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8585FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8586/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8587FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8588/* Opcode 0xf2 0x0f 0xf9 - invalid */
8589
8590/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8591FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8592/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8593FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8594/* Opcode 0xf2 0x0f 0xfa - invalid */
8595
8596/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8597FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8598/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8599FNIEMOP_STUB(iemOp_psubq_Vx_W);
8600/* Opcode 0xf2 0x0f 0xfb - invalid */
8601
8602/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8603FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8604/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8605FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8606/* Opcode 0xf2 0x0f 0xfc - invalid */
8607
8608/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8609FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8610/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8611FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8612/* Opcode 0xf2 0x0f 0xfd - invalid */
8613
8614/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8615FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8616/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8617FNIEMOP_STUB(iemOp_paddd_Vx_W);
8618/* Opcode 0xf2 0x0f 0xfe - invalid */
8619
8620
8621/** Opcode **** 0x0f 0xff - UD0 */
8622FNIEMOP_DEF(iemOp_ud0)
8623{
8624 IEMOP_MNEMONIC(ud0, "ud0");
8625 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8626 {
8627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8628#ifndef TST_IEM_CHECK_MC
8629 RTGCPTR GCPtrEff;
8630 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8631 if (rcStrict != VINF_SUCCESS)
8632 return rcStrict;
8633#endif
8634 IEMOP_HLP_DONE_DECODING();
8635 }
8636 return IEMOP_RAISE_INVALID_OPCODE();
8637}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
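/*
 * Note: Each row of g_apfnTwoByteMap holds four entries, one per mandatory
 * prefix column (none, 066h, 0f3h, 0f2h), which is what the AssertCompile
 * above verifies: 256 opcodes x 4 columns = 1024 entries. IEMOP_X4(a_Fn)
 * simply repeats a single handler across all four columns for opcodes that
 * ignore the mandatory prefixes. A minimal dispatch sketch, assuming the
 * decoder tracks the last 066h/0f3h/0f2h prefix in iem.s.idxPrefix (0 when
 * none was seen); the real dispatch code lives with the one-byte opcode map
 * and may differ in detail:
 *
 *     FNIEMOP_DEF(iemOp_2byteEscape_sketch)
 *     {
 *         uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *         return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
 *     }
 */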

/** @} */
