VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 66748

Last change on this file since 66748 was 66748, checked in by vboxsync, 8 years ago

IEM: More tests.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 312.1 KB
 
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66748 2017-05-02 14:36:39Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

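/*
 * Editor's note: throughout this file the recurring test
 * (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) asks whether the
 * ModR/M mod field is 3, i.e. whether the operand is a register rather than
 * memory.  A minimal sketch of the field layout, assuming the standard x86
 * ModR/M encoding (mod in bits 7:6, reg in 5:3, rm in 2:0):
 *
 * @code
 *     uint8_t const bMod = bRm >> 6;         // 3 = register form
 *     uint8_t const bReg = (bRm >> 3) & 7;   // register or opcode extension
 *     uint8_t const bRm3 = bRm & 7;          // register or addressing mode
 * @endcode
 */
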
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


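/*
 * Editor's note: the IEM_MC_* statements above form a declarative "microcode"
 * block: IEM_MC_BEGIN(cArgs, cLocals) opens it, the FETCH/STORE statements
 * describe the operation, and IEM_MC_ADVANCE_RIP() + IEM_MC_END() commit it.
 * As a rough sketch (not the actual macro expansion), the 16-bit register
 * path of sldt behaves roughly like:
 *
 * @code
 *     uint16_t u16Ldtr = pCtx->ldtr.Sel;     // IEM_MC_FETCH_LDTR_U16
 *     *pu16DstReg      = u16Ldtr;            // IEM_MC_STORE_GREG_U16
 *     pCtx->rip       += cbInstr;            // IEM_MC_ADVANCE_RIP
 * @endcode
 *
 * pCtx and pu16DstReg are illustrative names, not the real helpers.
 */
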
/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for group 6 /4 and /5 (verr, verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}

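/*
 * Editor's note: group opcodes such as 0x0f 0x00 use the ModR/M reg field as
 * an opcode extension.  The dispatch above is just a table lookup:
 *
 * @code
 *     uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // 0..7
 *     // conceptually: g_apfnGroup6[iReg](pVCpu, bRm);
 * @endcode
 */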

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

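/*
 * Editor's note: when VBOX_WITH_NESTED_HWVIRT is not defined, the SVM
 * instructions above are declared with FNIEMOP_UD_STUB, i.e. decoders that
 * simply raise #UD, so the group 7 dispatch below stays fully populated
 * either way.
 */
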
/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


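/*
 * Editor's note: the 0xffe0/0xfff0 ORs in smsw above model how older CPUs
 * report the machine status word: a 286 target reads the unimplemented high
 * bits (15:4) as ones, a 386 additionally implements ET (bit 4) so only bits
 * 15:5 read as ones, and later targets report the real CR0 low word.  As a
 * plain-C sketch, with u16Msw holding the raw CR0 low word:
 *
 * @code
 *     if (enmTargetCpu == IEMTARGETCPU_286)
 *         u16Msw |= UINT16_C(0xfff0);
 *     else if (enmTargetCpu == IEMTARGETCPU_386)
 *         u16Msw |= UINT16_C(0xffe0);
 * @endcode
 */
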
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

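/*
 * Editor's note: for group 7 the register form (mod=3) is decoded in two
 * steps: the reg field selects a sub-group in the switch above, and the rm
 * field then picks the concrete instruction (e.g. reg=1/rm=0 is monitor,
 * reg=1/rm=1 is mwait).  The memory form needs only the reg field, hence the
 * early g_apfnGroup7Mem table dispatch.
 */
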
/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
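
/*
 * Editor's note: in 64-bit mode the ModR/M reg and rm fields carry only the
 * low three bits of a register index; REX.R and REX.B supply bit 3.  IEM
 * keeps uRexReg/uRexB pre-shifted (0 or 8), which is why indices throughout
 * this file are composed as:
 *
 * @code
 *     iRegDst = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
 *     iRegSrc = (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB;
 * @endcode
 */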


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
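
/*
 * Editor's note: movddup duplicates the low quadword of the source into both
 * halves of the destination, i.e. iemAImpl_movddup computes something along
 * the lines of (using RTUINT128U's au64 view):
 *
 * @code
 *     puDst->au64[0] = uSrc;
 *     puDst->au64[1] = uSrc;
 * @endcode
 */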


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT

/**
 * @opdone
 * @opmnemonic udf20f16
 * @opcode 0x16
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT

/**
 * @opdone
 * @opmnemonic udf30f17
 * @opcode 0x17
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f17
 * @opcode 0x17
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}

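/*
 * Editor's note: both prefetch decoders above calculate the effective address
 * so that the ModR/M bytes are consumed correctly, then deliberately do
 * nothing.  Prefetching is an architectural no-op hint, so a NOP is a
 * conforming implementation.
 */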
1891
1892/** Opcode 0x0f 0x19..0x1f. */
1893FNIEMOP_DEF(iemOp_nop_Ev)
1894{
1895 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1897 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1898 {
1899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1900 IEM_MC_BEGIN(0, 0);
1901 IEM_MC_ADVANCE_RIP();
1902 IEM_MC_END();
1903 }
1904 else
1905 {
1906 IEM_MC_BEGIN(0, 1);
1907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1910 /* Currently a NOP. */
1911 NOREF(GCPtrEffSrc);
1912 IEM_MC_ADVANCE_RIP();
1913 IEM_MC_END();
1914 }
1915 return VINF_SUCCESS;
1916}
1917
1918
1919/** Opcode 0x0f 0x20. */
1920FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1921{
1922 /* mod is ignored, as is operand size overrides. */
1923 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1924 IEMOP_HLP_MIN_386();
1925 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1926 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1927 else
1928 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1929
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1932 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1933 {
1934 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1935 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1936 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1937 iCrReg |= 8;
1938 }
1939 switch (iCrReg)
1940 {
1941 case 0: case 2: case 3: case 4: case 8:
1942 break;
1943 default:
1944 return IEMOP_RAISE_INVALID_OPCODE();
1945 }
1946 IEMOP_HLP_DONE_DECODING();
1947
1948 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1949}
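
/*
 * Illustrative sketch (not part of the build): how the control register index
 * above is assembled.  ModR/M.reg supplies bits 0-2, REX.R supplies bit 3, and
 * on CPUs with the AMD-style alternative encoding a LOCK prefix selects CR8
 * from 32-bit code.  The helper name is hypothetical.
 */
#if 0
static uint8_t iemExampleDecodeCrReg(uint8_t bRm, bool fRexR, bool fLock)
{
    uint8_t iCrReg = (uint8_t)((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (fRexR)
        iCrReg |= 8;    /* REX.R extends the ModR/M reg field. */
    if (fLock)
        iCrReg |= 8;    /* LOCK-prefixed MOV CR0 encodes CR8 on such CPUs. */
    return iCrReg;
}
#endif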
1950
1951
1952/** Opcode 0x0f 0x21. */
1953FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1954{
1955 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1956 IEMOP_HLP_MIN_386();
1957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1959 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1960 return IEMOP_RAISE_INVALID_OPCODE();
1961 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1962 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1963 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1964}
1965
1966
1967/** Opcode 0x0f 0x22. */
1968FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1969{
    /* mod is ignored, as are operand-size overrides. */
1971 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1972 IEMOP_HLP_MIN_386();
1973 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1974 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1975 else
1976 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1977
1978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1979 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1980 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1981 {
1982 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1983 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1984 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1985 iCrReg |= 8;
1986 }
1987 switch (iCrReg)
1988 {
1989 case 0: case 2: case 3: case 4: case 8:
1990 break;
1991 default:
1992 return IEMOP_RAISE_INVALID_OPCODE();
1993 }
1994 IEMOP_HLP_DONE_DECODING();
1995
1996 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1997}
1998
1999
2000/** Opcode 0x0f 0x23. */
2001FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2002{
2003 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2004 IEMOP_HLP_MIN_386();
2005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2007 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2008 return IEMOP_RAISE_INVALID_OPCODE();
2009 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2010 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2011 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2012}
2013
2014
2015/** Opcode 0x0f 0x24. */
2016FNIEMOP_DEF(iemOp_mov_Rd_Td)
2017{
2018 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2019 /** @todo works on 386 and 486. */
2020 /* The RM byte is not considered, see testcase. */
2021 return IEMOP_RAISE_INVALID_OPCODE();
2022}
2023
2024
2025/** Opcode 0x0f 0x26. */
2026FNIEMOP_DEF(iemOp_mov_Td_Rd)
2027{
2028 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2029 /** @todo works on 386 and 486. */
2030 /* The RM byte is not considered, see testcase. */
2031 return IEMOP_RAISE_INVALID_OPCODE();
2032}
2033
2034
2035/** Opcode 0x0f 0x28 - movaps Vps, Wps */
2036FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2037{
2038 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
2039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2040 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2041 {
2042 /*
2043 * Register, register.
2044 */
2045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2046 IEM_MC_BEGIN(0, 0);
2047 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2048 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2049 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2050 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2051 IEM_MC_ADVANCE_RIP();
2052 IEM_MC_END();
2053 }
2054 else
2055 {
2056 /*
2057 * Register, memory.
2058 */
2059 IEM_MC_BEGIN(0, 2);
2060 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2062
2063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2065 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2066 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2067
2068 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2069 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2070
2071 IEM_MC_ADVANCE_RIP();
2072 IEM_MC_END();
2073 }
2074 return VINF_SUCCESS;
2075}
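
/*
 * Illustrative sketch (not part of the build): the requirement behind the
 * _ALIGN_SSE fetch above.  MOVAPS/MOVAPD demand a 16-byte aligned memory
 * operand and raise \#GP(0) otherwise; a hypothetical plain-C check:
 */
#if 0
static bool iemExampleIsSse128Aligned(RTGCPTR GCPtrMem)
{
    return (GCPtrMem & 15) == 0; /* 128-bit aligned-access instructions need 16-byte alignment. */
}
#endif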
2076
2077/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
2078FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2079{
2080 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2083 {
2084 /*
2085 * Register, register.
2086 */
2087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2088 IEM_MC_BEGIN(0, 0);
2089 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2090 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2091 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2092 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2093 IEM_MC_ADVANCE_RIP();
2094 IEM_MC_END();
2095 }
2096 else
2097 {
2098 /*
2099 * Register, memory.
2100 */
2101 IEM_MC_BEGIN(0, 2);
2102 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2104
2105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2107 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2109
2110 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2111 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2112
2113 IEM_MC_ADVANCE_RIP();
2114 IEM_MC_END();
2115 }
2116 return VINF_SUCCESS;
2117}
2118
2119/* Opcode 0xf3 0x0f 0x28 - invalid */
2120/* Opcode 0xf2 0x0f 0x28 - invalid */
2121
2122/** Opcode 0x0f 0x29 - movaps Wps, Vps */
2123FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2124{
2125 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2127 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2128 {
2129 /*
2130 * Register, register.
2131 */
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2133 IEM_MC_BEGIN(0, 0);
2134 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2136 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2137 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 }
2141 else
2142 {
2143 /*
2144 * Memory, register.
2145 */
2146 IEM_MC_BEGIN(0, 2);
2147 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2149
2150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2152 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2153 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2154
2155 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2156 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2157
2158 IEM_MC_ADVANCE_RIP();
2159 IEM_MC_END();
2160 }
2161 return VINF_SUCCESS;
2162}
2163
2164/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
2165FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2166{
2167 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2169 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2170 {
2171 /*
2172 * Register, register.
2173 */
2174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2175 IEM_MC_BEGIN(0, 0);
2176 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2178 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2179 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2180 IEM_MC_ADVANCE_RIP();
2181 IEM_MC_END();
2182 }
2183 else
2184 {
2185 /*
2186 * Memory, register.
2187 */
2188 IEM_MC_BEGIN(0, 2);
2189 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2191
2192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2194 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2195 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2196
2197 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2198 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2199
2200 IEM_MC_ADVANCE_RIP();
2201 IEM_MC_END();
2202 }
2203 return VINF_SUCCESS;
2204}
2205
2206/* Opcode 0xf3 0x0f 0x29 - invalid */
2207/* Opcode 0xf2 0x0f 0x29 - invalid */
2208
2209
2210/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2211FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2212/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2213FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2215FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2217FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2218
2219
/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2221FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2222{
2223 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2225 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2226 {
2227 /*
2228 * memory, register.
2229 */
2230 IEM_MC_BEGIN(0, 2);
2231 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2233
2234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2237 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2238
2239 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2240 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2241
2242 IEM_MC_ADVANCE_RIP();
2243 IEM_MC_END();
2244 }
2245 /* The register, register encoding is invalid. */
2246 else
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 return VINF_SUCCESS;
2249}
2250
2251/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2252FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2253{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2256 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2257 {
2258 /*
2259 * memory, register.
2260 */
2261 IEM_MC_BEGIN(0, 2);
2262 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2264
2265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2267 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2268 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2269
2270 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2271 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2272
2273 IEM_MC_ADVANCE_RIP();
2274 IEM_MC_END();
2275 }
2276 /* The register, register encoding is invalid. */
2277 else
2278 return IEMOP_RAISE_INVALID_OPCODE();
2279 return VINF_SUCCESS;
2280}
2281/* Opcode 0xf3 0x0f 0x2b - invalid */
2282/* Opcode 0xf2 0x0f 0x2b - invalid */
2283
2284
2285/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2286FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2287/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2288FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2289/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2290FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2291/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2292FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2293
2294/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2295FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2296/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2297FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2298/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2299FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2300/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2301FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2302
2303/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2304FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2305/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2306FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2307/* Opcode 0xf3 0x0f 0x2e - invalid */
2308/* Opcode 0xf2 0x0f 0x2e - invalid */
2309
2310/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2311FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2312/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2313FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2314/* Opcode 0xf3 0x0f 0x2f - invalid */
2315/* Opcode 0xf2 0x0f 0x2f - invalid */
2316
2317/** Opcode 0x0f 0x30. */
2318FNIEMOP_DEF(iemOp_wrmsr)
2319{
2320 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2322 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2323}
2324
2325
2326/** Opcode 0x0f 0x31. */
2327FNIEMOP_DEF(iemOp_rdtsc)
2328{
2329 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2331 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2332}
2333
2334
/** Opcode 0x0f 0x32. */
2336FNIEMOP_DEF(iemOp_rdmsr)
2337{
2338 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2340 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2341}
2342
2343
/** Opcode 0x0f 0x33. */
2345FNIEMOP_DEF(iemOp_rdpmc)
2346{
2347 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2349 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2350}
2351
2352
2353/** Opcode 0x0f 0x34. */
2354FNIEMOP_STUB(iemOp_sysenter);
2355/** Opcode 0x0f 0x35. */
2356FNIEMOP_STUB(iemOp_sysexit);
2357/** Opcode 0x0f 0x37. */
2358FNIEMOP_STUB(iemOp_getsec);
2359
2360
2361/** Opcode 0x0f 0x38. */
2362FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2363{
2364#ifdef IEM_WITH_THREE_0F_38
2365 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2366 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2367#else
2368 IEMOP_BITCH_ABOUT_STUB();
2369 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2370#endif
2371}
2372
2373
2374/** Opcode 0x0f 0x3a. */
2375FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2376{
2377#ifdef IEM_WITH_THREE_0F_3A
2378 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2379 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2380#else
2381 IEMOP_BITCH_ABOUT_STUB();
2382 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2383#endif
2384}
2385
2386
2387/**
2388 * Implements a conditional move.
2389 *
 * Wish there were an obvious way to do this that would let us share code and
 * reduce bloat.
2392 *
2393 * @param a_Cnd The conditional "microcode" operation.
2394 */
2395#define CMOV_X(a_Cnd) \
2396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2397 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2398 { \
2399 switch (pVCpu->iem.s.enmEffOpSize) \
2400 { \
2401 case IEMMODE_16BIT: \
2402 IEM_MC_BEGIN(0, 1); \
2403 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2404 a_Cnd { \
2405 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2406 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2407 } IEM_MC_ENDIF(); \
2408 IEM_MC_ADVANCE_RIP(); \
2409 IEM_MC_END(); \
2410 return VINF_SUCCESS; \
2411 \
2412 case IEMMODE_32BIT: \
2413 IEM_MC_BEGIN(0, 1); \
2414 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2415 a_Cnd { \
2416 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2417 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2418 } IEM_MC_ELSE() { \
2419 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2420 } IEM_MC_ENDIF(); \
2421 IEM_MC_ADVANCE_RIP(); \
2422 IEM_MC_END(); \
2423 return VINF_SUCCESS; \
2424 \
2425 case IEMMODE_64BIT: \
2426 IEM_MC_BEGIN(0, 1); \
2427 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2428 a_Cnd { \
2429 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2430 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2431 } IEM_MC_ENDIF(); \
2432 IEM_MC_ADVANCE_RIP(); \
2433 IEM_MC_END(); \
2434 return VINF_SUCCESS; \
2435 \
2436 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2437 } \
2438 } \
2439 else \
2440 { \
2441 switch (pVCpu->iem.s.enmEffOpSize) \
2442 { \
2443 case IEMMODE_16BIT: \
2444 IEM_MC_BEGIN(0, 2); \
2445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2446 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2448 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2449 a_Cnd { \
2450 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2451 } IEM_MC_ENDIF(); \
2452 IEM_MC_ADVANCE_RIP(); \
2453 IEM_MC_END(); \
2454 return VINF_SUCCESS; \
2455 \
2456 case IEMMODE_32BIT: \
2457 IEM_MC_BEGIN(0, 2); \
2458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2459 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2461 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2462 a_Cnd { \
2463 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2464 } IEM_MC_ELSE() { \
2465 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2466 } IEM_MC_ENDIF(); \
2467 IEM_MC_ADVANCE_RIP(); \
2468 IEM_MC_END(); \
2469 return VINF_SUCCESS; \
2470 \
2471 case IEMMODE_64BIT: \
2472 IEM_MC_BEGIN(0, 2); \
2473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2474 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2476 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2477 a_Cnd { \
2478 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2479 } IEM_MC_ENDIF(); \
2480 IEM_MC_ADVANCE_RIP(); \
2481 IEM_MC_END(); \
2482 return VINF_SUCCESS; \
2483 \
2484 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2485 } \
2486 } do {} while (0)
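
/*
 * Illustrative sketch (not part of the build): the 32-bit register case the
 * macro above generates, expressed as plain C.  Note that in 64-bit mode the
 * upper half of the destination is cleared even when the condition is false
 * (the IEM_MC_CLEAR_HIGH_GREG_U64 in the else branch).  Names are hypothetical.
 */
#if 0
static uint64_t iemExampleCmov32(uint64_t uDstOld, uint32_t uSrc, bool fCondition)
{
    if (fCondition)
        return uSrc;            /* Move taken: the source, zero extended. */
    return (uint32_t)uDstOld;   /* Move not taken: old low dword, upper half still cleared. */
}
#endif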
2487
2488
2489
2490/** Opcode 0x0f 0x40. */
2491FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2492{
2493 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2494 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2495}
2496
2497
2498/** Opcode 0x0f 0x41. */
2499FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2500{
2501 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2502 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2503}
2504
2505
2506/** Opcode 0x0f 0x42. */
2507FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2508{
2509 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2510 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2511}
2512
2513
2514/** Opcode 0x0f 0x43. */
2515FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2516{
2517 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2518 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2519}
2520
2521
2522/** Opcode 0x0f 0x44. */
2523FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2524{
2525 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2526 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2527}
2528
2529
2530/** Opcode 0x0f 0x45. */
2531FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2532{
2533 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2534 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2535}
2536
2537
2538/** Opcode 0x0f 0x46. */
2539FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2540{
2541 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2542 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2543}
2544
2545
2546/** Opcode 0x0f 0x47. */
2547FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2548{
2549 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2550 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2551}
2552
2553
2554/** Opcode 0x0f 0x48. */
2555FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2556{
2557 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2558 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2559}
2560
2561
2562/** Opcode 0x0f 0x49. */
2563FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2564{
2565 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2566 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2567}
2568
2569
2570/** Opcode 0x0f 0x4a. */
2571FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2572{
2573 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2574 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2575}
2576
2577
2578/** Opcode 0x0f 0x4b. */
2579FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2580{
2581 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2582 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2583}
2584
2585
2586/** Opcode 0x0f 0x4c. */
2587FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2588{
2589 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2590 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2591}
2592
2593
2594/** Opcode 0x0f 0x4d. */
2595FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2596{
2597 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2598 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2599}
2600
2601
2602/** Opcode 0x0f 0x4e. */
2603FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2604{
2605 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2606 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2607}
2608
2609
2610/** Opcode 0x0f 0x4f. */
2611FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2612{
2613 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2614 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2615}
2616
2617#undef CMOV_X
2618
2619/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2620FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2621/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2622FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2623/* Opcode 0xf3 0x0f 0x50 - invalid */
2624/* Opcode 0xf2 0x0f 0x50 - invalid */
2625
2626/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2627FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2628/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2629FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2630/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2631FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2632/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2633FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2634
2635/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2636FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2637/* Opcode 0x66 0x0f 0x52 - invalid */
2638/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2639FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2640/* Opcode 0xf2 0x0f 0x52 - invalid */
2641
2642/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2643FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2644/* Opcode 0x66 0x0f 0x53 - invalid */
2645/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2646FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2647/* Opcode 0xf2 0x0f 0x53 - invalid */
2648
2649/** Opcode 0x0f 0x54 - andps Vps, Wps */
2650FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2651/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2652FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2653/* Opcode 0xf3 0x0f 0x54 - invalid */
2654/* Opcode 0xf2 0x0f 0x54 - invalid */
2655
2656/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2657FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2658/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2659FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2660/* Opcode 0xf3 0x0f 0x55 - invalid */
2661/* Opcode 0xf2 0x0f 0x55 - invalid */
2662
2663/** Opcode 0x0f 0x56 - orps Vps, Wps */
2664FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2665/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2666FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2667/* Opcode 0xf3 0x0f 0x56 - invalid */
2668/* Opcode 0xf2 0x0f 0x56 - invalid */
2669
2670/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2671FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2672/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2673FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2674/* Opcode 0xf3 0x0f 0x57 - invalid */
2675/* Opcode 0xf2 0x0f 0x57 - invalid */
2676
2677/** Opcode 0x0f 0x58 - addps Vps, Wps */
2678FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2679/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2680FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2681/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2682FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2683/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2684FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2685
2686/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2687FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2688/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2689FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2690/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2691FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2692/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2693FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2694
2695/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2696FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2697/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2698FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2699/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2700FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2701/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2702FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2703
2704/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2705FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2706/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2707FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2708/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2709FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2710/* Opcode 0xf2 0x0f 0x5b - invalid */
2711
2712/** Opcode 0x0f 0x5c - subps Vps, Wps */
2713FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2714/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2715FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2716/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2717FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2718/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2719FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2720
2721/** Opcode 0x0f 0x5d - minps Vps, Wps */
2722FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2723/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2724FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2725/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2726FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2727/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2728FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2729
2730/** Opcode 0x0f 0x5e - divps Vps, Wps */
2731FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2732/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2733FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2734/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2735FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2736/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2737FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2738
2739/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2740FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2741/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2742FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2743/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2744FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2745/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2746FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2747
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx    mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
2758FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2759{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint32_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
2802}
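
/*
 * Illustrative sketch (not part of the build): the byte interleave a typical
 * "LowLow" operation like PUNPCKLBW performs, shown for the 64-bit MMX case
 * with its 32-bit source.  The helper name is hypothetical.
 */
#if 0
static void iemExamplePunpcklbwU64(uint64_t *puDst, uint32_t uSrc)
{
    uint64_t const uDst    = *puDst;
    uint64_t       uResult = 0;
    for (unsigned iByte = 0; iByte < 4; iByte++)
    {
        uResult |= ((uDst >> (iByte * 8)) & UINT64_C(0xff)) << (iByte * 16);      /* Even result bytes come from the destination's low half. */
        uResult |= (uint64_t)((uSrc >> (iByte * 8)) & 0xff) << (iByte * 16 + 8);  /* Odd result bytes come from the source. */
    }
    *puDst = uResult;
}
#endif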
2803
2804
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which only the lower 64 bits are used
 * (real hardware may read the full 128 bits).
 *
 * Exceptions type 4.
 */
2815FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2816{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
2863}
2864
2865
2866/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2867FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2868{
2869 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2870 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2871}
2872
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2874FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2875{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2877 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2878}
2879
2880/* Opcode 0xf3 0x0f 0x60 - invalid */
2881
2882
2883/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2884FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2885{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!, whereas Intel says it requires the MMX CPUID bit. */
2887 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2888}
2889
2890/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2891FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2892{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2894 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2895}
2896
2897/* Opcode 0xf3 0x0f 0x61 - invalid */
2898
2899
2900/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2901FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2902{
2903 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2904 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2905}
2906
2907/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2908FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2909{
2910 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2911 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2912}
2913
2914/* Opcode 0xf3 0x0f 0x62 - invalid */
2915
2916
2917
2918/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2919FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2920/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2921FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2922/* Opcode 0xf3 0x0f 0x63 - invalid */
2923
2924/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2925FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2926/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2927FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2928/* Opcode 0xf3 0x0f 0x64 - invalid */
2929
2930/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2931FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2932/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2933FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2934/* Opcode 0xf3 0x0f 0x65 - invalid */
2935
2936/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2937FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2938/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2939FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2940/* Opcode 0xf3 0x0f 0x66 - invalid */
2941
2942/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2943FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
2945FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2946/* Opcode 0xf3 0x0f 0x67 - invalid */
2947
2948
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx    mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access.
 *
 * Exceptions type 4.
 */
2959FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2960{
2961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2962 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2963 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2964 {
2965 /*
2966 * Register, register.
2967 */
2968 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2969 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2971 IEM_MC_BEGIN(2, 0);
2972 IEM_MC_ARG(uint64_t *, pDst, 0);
2973 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2974 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2975 IEM_MC_PREPARE_FPU_USAGE();
2976 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2977 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2978 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2979 IEM_MC_ADVANCE_RIP();
2980 IEM_MC_END();
2981 }
2982 else
2983 {
2984 /*
2985 * Register, memory.
2986 */
2987 IEM_MC_BEGIN(2, 2);
2988 IEM_MC_ARG(uint64_t *, pDst, 0);
2989 IEM_MC_LOCAL(uint64_t, uSrc);
2990 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2992
2993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2995 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2996 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2997
2998 IEM_MC_PREPARE_FPU_USAGE();
2999 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3000 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3001
3002 IEM_MC_ADVANCE_RIP();
3003 IEM_MC_END();
3004 }
3005 return VINF_SUCCESS;
3006}
3007
3008
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 128-bit aligned access where the implementation may read the full
 * 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
3019FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3020{
3021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3022 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3023 {
3024 /*
3025 * Register, register.
3026 */
3027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3028 IEM_MC_BEGIN(2, 0);
3029 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3030 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3031 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3032 IEM_MC_PREPARE_SSE_USAGE();
3033 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3034 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3035 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3036 IEM_MC_ADVANCE_RIP();
3037 IEM_MC_END();
3038 }
3039 else
3040 {
3041 /*
3042 * Register, memory.
3043 */
3044 IEM_MC_BEGIN(2, 2);
3045 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3046 IEM_MC_LOCAL(RTUINT128U, uSrc);
3047 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3049
3050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3052 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3054
3055 IEM_MC_PREPARE_SSE_USAGE();
3056 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3057 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3058
3059 IEM_MC_ADVANCE_RIP();
3060 IEM_MC_END();
3061 }
3062 return VINF_SUCCESS;
3063}
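
/*
 * Illustrative sketch (not part of the build): what a typical "HighHigh"
 * operation like PUNPCKHDQ does with the two upper halves, shown for the
 * 64-bit MMX case.  The helper name is hypothetical.
 */
#if 0
static void iemExamplePunpckhdqU64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint32_t const uDstHi = (uint32_t)(*puDst >> 32); /* Upper dword of the destination. */
    uint32_t const uSrcHi = (uint32_t)(*puSrc >> 32); /* Upper dword of the source. */
    *puDst = ((uint64_t)uSrcHi << 32) | uDstHi;       /* Interleaved: dst.hi in the low dword, src.hi in the high dword. */
}
#endif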
3064
3065
3066/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3067FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3068{
3069 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3070 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3071}
3072
3073/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3074FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3075{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3077 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3078}
3079/* Opcode 0xf3 0x0f 0x68 - invalid */
3080
3081
3082/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3083FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3084{
3085 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3086 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3087}
3088
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3090FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3091{
3092 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
3096/* Opcode 0xf3 0x0f 0x69 - invalid */
3097
3098
3099/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3100FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3101{
3102 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3103 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3104}
3105
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
3112/* Opcode 0xf3 0x0f 0x6a - invalid */
3113
3114
3115/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3116FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3117/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3118FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3119/* Opcode 0xf3 0x0f 0x6b - invalid */
3120
3121
3122/* Opcode 0x0f 0x6c - invalid */
3123
3124/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3125FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3126{
3127 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3128 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3129}
3130
3131/* Opcode 0xf3 0x0f 0x6c - invalid */
3132/* Opcode 0xf2 0x0f 0x6c - invalid */
3133
3134
3135/* Opcode 0x0f 0x6d - invalid */
3136
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
3143
3144/* Opcode 0xf3 0x0f 0x6d - invalid */
3145
3146
3147/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3148FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3149{
3150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3152 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3153 else
3154 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3156 {
3157 /* MMX, greg */
3158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3159 IEM_MC_BEGIN(0, 1);
3160 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3161 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3162 IEM_MC_LOCAL(uint64_t, u64Tmp);
3163 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3164 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3165 else
3166 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3167 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3168 IEM_MC_ADVANCE_RIP();
3169 IEM_MC_END();
3170 }
3171 else
3172 {
3173 /* MMX, [mem] */
3174 IEM_MC_BEGIN(0, 2);
3175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3176 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3179 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3180 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3181 {
3182 IEM_MC_LOCAL(uint64_t, u64Tmp);
3183 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3184 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3185 }
3186 else
3187 {
3188 IEM_MC_LOCAL(uint32_t, u32Tmp);
3189 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3190 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3191 }
3192 IEM_MC_ADVANCE_RIP();
3193 IEM_MC_END();
3194 }
3195 return VINF_SUCCESS;
3196}
3197
3198/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3199FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3200{
3201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
    else
        IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
3206 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3207 {
        /* XMM, greg */
3209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3210 IEM_MC_BEGIN(0, 1);
3211 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3212 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3213 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3214 {
3215 IEM_MC_LOCAL(uint64_t, u64Tmp);
3216 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3217 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3218 }
3219 else
3220 {
3221 IEM_MC_LOCAL(uint32_t, u32Tmp);
3222 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3223 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3224 }
3225 IEM_MC_ADVANCE_RIP();
3226 IEM_MC_END();
3227 }
3228 else
3229 {
3230 /* XMM, [mem] */
3231 IEM_MC_BEGIN(0, 2);
3232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3233 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3236 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3237 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3238 {
3239 IEM_MC_LOCAL(uint64_t, u64Tmp);
3240 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3241 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3242 }
3243 else
3244 {
3245 IEM_MC_LOCAL(uint32_t, u32Tmp);
3246 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3247 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3248 }
3249 IEM_MC_ADVANCE_RIP();
3250 IEM_MC_END();
3251 }
3252 return VINF_SUCCESS;
3253}
3254
3255/* Opcode 0xf3 0x0f 0x6e - invalid */
3256
3257
3258/** Opcode 0x0f 0x6f - movq Pq, Qq */
3259FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3260{
3261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3262 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3263 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3264 {
3265 /*
3266 * Register, register.
3267 */
3268 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3269 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3271 IEM_MC_BEGIN(0, 1);
3272 IEM_MC_LOCAL(uint64_t, u64Tmp);
3273 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3274 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3275 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3276 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3277 IEM_MC_ADVANCE_RIP();
3278 IEM_MC_END();
3279 }
3280 else
3281 {
3282 /*
3283 * Register, memory.
3284 */
3285 IEM_MC_BEGIN(0, 2);
3286 IEM_MC_LOCAL(uint64_t, u64Tmp);
3287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3288
3289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3292 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3293 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3294 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3295
3296 IEM_MC_ADVANCE_RIP();
3297 IEM_MC_END();
3298 }
3299 return VINF_SUCCESS;
3300}
3301
3302/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3303FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3304{
3305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3306 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3307 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3308 {
3309 /*
3310 * Register, register.
3311 */
3312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3313 IEM_MC_BEGIN(0, 0);
3314 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3315 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3316 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3317 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3318 IEM_MC_ADVANCE_RIP();
3319 IEM_MC_END();
3320 }
3321 else
3322 {
3323 /*
3324 * Register, memory.
3325 */
3326 IEM_MC_BEGIN(0, 2);
3327 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3329
3330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3333 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3334 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3335 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3336
3337 IEM_MC_ADVANCE_RIP();
3338 IEM_MC_END();
3339 }
3340 return VINF_SUCCESS;
3341}
3342
3343/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3344FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3345{
3346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3347 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3349 {
3350 /*
3351 * Register, register.
3352 */
3353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3354 IEM_MC_BEGIN(0, 0);
3355 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3356 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3357 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3358 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3359 IEM_MC_ADVANCE_RIP();
3360 IEM_MC_END();
3361 }
3362 else
3363 {
3364 /*
3365 * Register, memory.
3366 */
3367 IEM_MC_BEGIN(0, 2);
3368 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3370
3371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3373 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3374 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3375 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3376 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3377
3378 IEM_MC_ADVANCE_RIP();
3379 IEM_MC_END();
3380 }
3381 return VINF_SUCCESS;
3382}
3383
3384
3385/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3386FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3387{
3388 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3390 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3391 {
3392 /*
3393 * Register, register.
3394 */
3395 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3397
3398 IEM_MC_BEGIN(3, 0);
3399 IEM_MC_ARG(uint64_t *, pDst, 0);
3400 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3401 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3402 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3403 IEM_MC_PREPARE_FPU_USAGE();
3404 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3405 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3406 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3407 IEM_MC_ADVANCE_RIP();
3408 IEM_MC_END();
3409 }
3410 else
3411 {
3412 /*
3413 * Register, memory.
3414 */
3415 IEM_MC_BEGIN(3, 2);
3416 IEM_MC_ARG(uint64_t *, pDst, 0);
3417 IEM_MC_LOCAL(uint64_t, uSrc);
3418 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3420
3421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3422 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3423 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3425 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3426
3427 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3428 IEM_MC_PREPARE_FPU_USAGE();
3429 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3430 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3431
3432 IEM_MC_ADVANCE_RIP();
3433 IEM_MC_END();
3434 }
3435 return VINF_SUCCESS;
3436}
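
/*
 * Illustrative sketch (not part of the build): the PSHUFW semantics behind
 * iemAImpl_pshufw.  Each 2-bit field of the immediate selects the source word
 * for the corresponding destination word, low to high.  The helper name is
 * hypothetical.
 */
#if 0
static uint64_t iemExamplePshufw(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3;                    /* Which source word to take. */
        uResult |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
    }
    return uResult;
}
#endif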
3437
3438/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3439FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3440{
3441 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3443 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3444 {
3445 /*
3446 * Register, register.
3447 */
3448 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3450
3451 IEM_MC_BEGIN(3, 0);
3452 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3453 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3454 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3455 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3456 IEM_MC_PREPARE_SSE_USAGE();
3457 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3458 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3459 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 else
3464 {
3465 /*
3466 * Register, memory.
3467 */
3468 IEM_MC_BEGIN(3, 2);
3469 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3470 IEM_MC_LOCAL(RTUINT128U, uSrc);
3471 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3473
3474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3475 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3476 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3478 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3479
3480 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3481 IEM_MC_PREPARE_SSE_USAGE();
3482 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3483 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3484
3485 IEM_MC_ADVANCE_RIP();
3486 IEM_MC_END();
3487 }
3488 return VINF_SUCCESS;
3489}
3490
3491/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3492FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3493{
3494 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3496 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3497 {
3498 /*
3499 * Register, register.
3500 */
3501 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3503
3504 IEM_MC_BEGIN(3, 0);
3505 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3506 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3507 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3508 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3509 IEM_MC_PREPARE_SSE_USAGE();
3510 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3511 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3512 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3513 IEM_MC_ADVANCE_RIP();
3514 IEM_MC_END();
3515 }
3516 else
3517 {
3518 /*
3519 * Register, memory.
3520 */
3521 IEM_MC_BEGIN(3, 2);
3522 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3523 IEM_MC_LOCAL(RTUINT128U, uSrc);
3524 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3526
3527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3528 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3529 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3531 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3532
3533 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3534 IEM_MC_PREPARE_SSE_USAGE();
3535 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3536 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3537
3538 IEM_MC_ADVANCE_RIP();
3539 IEM_MC_END();
3540 }
3541 return VINF_SUCCESS;
3542}
3543
3544/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3545FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3546{
3547 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3550 {
3551 /*
3552 * Register, register.
3553 */
3554 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3556
3557 IEM_MC_BEGIN(3, 0);
3558 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3559 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3560 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3561 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3562 IEM_MC_PREPARE_SSE_USAGE();
3563 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3564 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3565 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3566 IEM_MC_ADVANCE_RIP();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 /*
3572 * Register, memory.
3573 */
3574 IEM_MC_BEGIN(3, 2);
3575 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3576 IEM_MC_LOCAL(RTUINT128U, uSrc);
3577 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3579
3580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3581 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3582 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3584 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3585
3586 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3587 IEM_MC_PREPARE_SSE_USAGE();
3588 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3589 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3590
3591 IEM_MC_ADVANCE_RIP();
3592 IEM_MC_END();
3593 }
3594 return VINF_SUCCESS;
3595}
3596
3597
3598/** Opcode 0x0f 0x71 11/2. */
3599FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3600
3601/** Opcode 0x66 0x0f 0x71 11/2. */
3602FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3603
3604/** Opcode 0x0f 0x71 11/4. */
3605FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3606
3607/** Opcode 0x66 0x0f 0x71 11/4. */
3608FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3609
3610/** Opcode 0x0f 0x71 11/6. */
3611FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3612
3613/** Opcode 0x66 0x0f 0x71 11/6. */
3614FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3615
3616
3617/**
3618 * Group 12 jump table for register variant.
3619 */
3620IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3621{
3622 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3623 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3624 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3625 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3626 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3627 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3628 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3629 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3630};
3631AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3632
3633
3634/** Opcode 0x0f 0x71. */
3635FNIEMOP_DEF(iemOp_Grp12)
3636{
3637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3638 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3639 /* register, register */
3640 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3641 + pVCpu->iem.s.idxPrefix], bRm);
3642 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3643}
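
/*
 * Note: each /r row in the table above has four columns following the usual
 * prefix indexing of pVCpu->iem.s.idxPrefix (0 = none, 1 = 0x66, 2 = 0xf3,
 * 3 = 0xf2, matching the MMX and SSE columns above), so the dispatcher
 * selects the row by the ModR/M reg field and the column by the SIMD prefix.
 * For instance, bRm = 0xd1 (mod=3, reg=2, rm=1) with a 0x66 prefix yields
 * g_apfnGroup12RegReg[2*4 + 1] = iemOp_Grp12_psrlw_Ux_Ib, i.e.
 * 'psrlw xmm1, Ib'. Groups 13 and 14 below are dispatched the same way.
 */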
3644
3645
3646/** Opcode 0x0f 0x72 11/2. */
3647FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3648
3649/** Opcode 0x66 0x0f 0x72 11/2. */
3650FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3651
3652/** Opcode 0x0f 0x72 11/4. */
3653FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3654
3655/** Opcode 0x66 0x0f 0x72 11/4. */
3656FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3657
3658/** Opcode 0x0f 0x72 11/6. */
3659FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3660
3661/** Opcode 0x66 0x0f 0x72 11/6. */
3662FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3663
3664
3665/**
3666 * Group 13 jump table for register variant.
3667 */
3668IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3669{
3670 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3671 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3672 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3673 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3674 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3675 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3676 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3677 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3678};
3679AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3680
3681/** Opcode 0x0f 0x72. */
3682FNIEMOP_DEF(iemOp_Grp13)
3683{
3684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3685 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3686 /* register, register */
3687 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3688 + pVCpu->iem.s.idxPrefix], bRm);
3689 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3690}
3691
3692
3693/** Opcode 0x0f 0x73 11/2. */
3694FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3695
3696/** Opcode 0x66 0x0f 0x73 11/2. */
3697FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3698
3699/** Opcode 0x66 0x0f 0x73 11/3. */
3700FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3701
3702/** Opcode 0x0f 0x73 11/6. */
3703FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3704
3705/** Opcode 0x66 0x0f 0x73 11/6. */
3706FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3707
3708/** Opcode 0x66 0x0f 0x73 11/7. */
3709FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3710
3711/**
3712 * Group 14 jump table for register variant.
3713 */
3714IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3715{
3716 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3717 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3718 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3719 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3720 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3721 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3722 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3723 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3724};
3725AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3726
3727
3728/** Opcode 0x0f 0x73. */
3729FNIEMOP_DEF(iemOp_Grp14)
3730{
3731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3733 /* register, register */
3734 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3735 + pVCpu->iem.s.idxPrefix], bRm);
3736 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3737}
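
/*
 * Note: group 14 is the only one of the three with the /3 and /7 rows
 * populated, and only in the 0x66 column, since psrldq and pslldq are
 * SSE2-only whole-register byte shifts without MMX counterparts.
 */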
3738
3739
3740/**
3741 * Common worker for MMX instructions of the form:
3742 * pxxx mm1, mm2/mem64
3743 */
3744FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3745{
3746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3747 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3748 {
3749 /*
3750 * Register, register.
3751 */
3752 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3753 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 IEM_MC_BEGIN(2, 0);
3756 IEM_MC_ARG(uint64_t *, pDst, 0);
3757 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3758 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3759 IEM_MC_PREPARE_FPU_USAGE();
3760 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3761 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3762 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3763 IEM_MC_ADVANCE_RIP();
3764 IEM_MC_END();
3765 }
3766 else
3767 {
3768 /*
3769 * Register, memory.
3770 */
3771 IEM_MC_BEGIN(2, 2);
3772 IEM_MC_ARG(uint64_t *, pDst, 0);
3773 IEM_MC_LOCAL(uint64_t, uSrc);
3774 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3776
3777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3779 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3780 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3781
3782 IEM_MC_PREPARE_FPU_USAGE();
3783 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3784 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3785
3786 IEM_MC_ADVANCE_RIP();
3787 IEM_MC_END();
3788 }
3789 return VINF_SUCCESS;
3790}
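
/*
 * Note: since the MMX registers alias the x87 register stack, the worker
 * above uses IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT and IEM_MC_PREPARE_FPU_USAGE
 * rather than the SSE equivalents. Also note that IEM_MC_CALC_RM_EFF_ADDR is
 * invoked before IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX in the memory path,
 * as decoding isn't complete until any SIB and displacement bytes have been
 * consumed.
 */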
3791
3792
3793/**
3794 * Common worker for SSE2 instructions of the form:
3795 * pxxx xmm1, xmm2/mem128
3796 *
3797 * Proper alignment of the 128-bit operand is enforced.
3798 * Exceptions type 4. SSE2 cpuid checks.
3799 */
3800FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3801{
3802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3804 {
3805 /*
3806 * Register, register.
3807 */
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809 IEM_MC_BEGIN(2, 0);
3810 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3811 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3812 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3813 IEM_MC_PREPARE_SSE_USAGE();
3814 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3815 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3816 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3817 IEM_MC_ADVANCE_RIP();
3818 IEM_MC_END();
3819 }
3820 else
3821 {
3822 /*
3823 * Register, memory.
3824 */
3825 IEM_MC_BEGIN(2, 2);
3826 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3827 IEM_MC_LOCAL(RTUINT128U, uSrc);
3828 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3830
3831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3833 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3834 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3835
3836 IEM_MC_PREPARE_SSE_USAGE();
3837 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3838 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3839
3840 IEM_MC_ADVANCE_RIP();
3841 IEM_MC_END();
3842 }
3843 return VINF_SUCCESS;
3844}
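
/*
 * Note: IEM_MC_FETCH_MEM_U128_ALIGN_SSE is what implements the 'proper
 * alignment enforced' remark above: a 16-byte memory operand that isn't
 * 16-byte aligned raises #GP(0), as specified for legacy SSE (exception
 * type 4) instructions.
 */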
3845
3846
3847/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3848FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3849{
3850 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3851 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3852}
3853
3854/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3855FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3856{
3857 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3858 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3859}
3860
3861/* Opcode 0xf3 0x0f 0x74 - invalid */
3862/* Opcode 0xf2 0x0f 0x74 - invalid */
3863
3864
3865/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3866FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3867{
3868 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3869 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3870}
3871
3872/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3873FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3874{
3875 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3876 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3877}
3878
3879/* Opcode 0xf3 0x0f 0x75 - invalid */
3880/* Opcode 0xf2 0x0f 0x75 - invalid */
3881
3882
3883/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3884FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3885{
3886 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3887 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3888}
3889
3890/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3891FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3892{
3893 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3894 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3895}
3896
3897/* Opcode 0xf3 0x0f 0x76 - invalid */
3898/* Opcode 0xf2 0x0f 0x76 - invalid */
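
/*
 * Note: the pcmpeq* instructions above produce per-element masks rather than
 * EFLAGS results: each byte/word/dword of the destination becomes all ones
 * where the elements are equal and all zeroes where they differ. E.g.
 * 'pcmpeqb xmm0, xmm0' sets xmm0 to all ones, a common idiom for
 * materializing -1.
 */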
3899
3900
3901/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3902FNIEMOP_STUB(iemOp_emms);
3903/* Opcode 0x66 0x0f 0x77 - invalid */
3904/* Opcode 0xf3 0x0f 0x77 - invalid */
3905/* Opcode 0xf2 0x0f 0x77 - invalid */
3906
3907/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3908FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3909/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3910FNIEMOP_STUB(iemOp_AmdGrp17);
3911/* Opcode 0xf3 0x0f 0x78 - invalid */
3912/* Opcode 0xf2 0x0f 0x78 - invalid */
3913
3914/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3915FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3916/* Opcode 0x66 0x0f 0x79 - invalid */
3917/* Opcode 0xf3 0x0f 0x79 - invalid */
3918/* Opcode 0xf2 0x0f 0x79 - invalid */
3919
3920/* Opcode 0x0f 0x7a - invalid */
3921/* Opcode 0x66 0x0f 0x7a - invalid */
3922/* Opcode 0xf3 0x0f 0x7a - invalid */
3923/* Opcode 0xf2 0x0f 0x7a - invalid */
3924
3925/* Opcode 0x0f 0x7b - invalid */
3926/* Opcode 0x66 0x0f 0x7b - invalid */
3927/* Opcode 0xf3 0x0f 0x7b - invalid */
3928/* Opcode 0xf2 0x0f 0x7b - invalid */
3929
3930/* Opcode 0x0f 0x7c - invalid */
3931/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3932FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3933/* Opcode 0xf3 0x0f 0x7c - invalid */
3934/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3935FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3936
3937/* Opcode 0x0f 0x7d - invalid */
3938/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3939FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3940/* Opcode 0xf3 0x0f 0x7d - invalid */
3941/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3942FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3943
3944
3945/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3946FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3947{
3948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3949 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3950 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3951 else
3952 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3953 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3954 {
3955 /* greg, MMX */
3956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3957 IEM_MC_BEGIN(0, 1);
3958 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3959 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3960 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3961 {
3962 IEM_MC_LOCAL(uint64_t, u64Tmp);
3963 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3964 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3965 }
3966 else
3967 {
3968 IEM_MC_LOCAL(uint32_t, u32Tmp);
3969 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3970 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3971 }
3972 IEM_MC_ADVANCE_RIP();
3973 IEM_MC_END();
3974 }
3975 else
3976 {
3977 /* [mem], MMX */
3978 IEM_MC_BEGIN(0, 2);
3979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3980 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate operand. */
3982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3983 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3984 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3985 {
3986 IEM_MC_LOCAL(uint64_t, u64Tmp);
3987 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3988 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3989 }
3990 else
3991 {
3992 IEM_MC_LOCAL(uint32_t, u32Tmp);
3993 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3994 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3995 }
3996 IEM_MC_ADVANCE_RIP();
3997 IEM_MC_END();
3998 }
3999 return VINF_SUCCESS;
4000}
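
/*
 * Note: REX.W is what selects between the movd and movq forms here, e.g.
 * '0f 7e c0' is 'movd eax, mm0' while '48 0f 7e c0' is 'movq rax, mm0'.
 * The 32-bit register store zero-extends to the full 64-bit GPR, as usual
 * for 32-bit destinations in long mode.
 */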
4001
4002/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4003FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4004{
4005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4006 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4007 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4008 else
4009 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4011 {
4012 /* greg, XMM */
4013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4014 IEM_MC_BEGIN(0, 1);
4015 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4016 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4017 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4018 {
4019 IEM_MC_LOCAL(uint64_t, u64Tmp);
4020 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4021 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4022 }
4023 else
4024 {
4025 IEM_MC_LOCAL(uint32_t, u32Tmp);
4026 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4027 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4028 }
4029 IEM_MC_ADVANCE_RIP();
4030 IEM_MC_END();
4031 }
4032 else
4033 {
4034 /* [mem], XMM */
4035 IEM_MC_BEGIN(0, 2);
4036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4037 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate operand. */
4039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4040 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4041 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4042 {
4043 IEM_MC_LOCAL(uint64_t, u64Tmp);
4044 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4045 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4046 }
4047 else
4048 {
4049 IEM_MC_LOCAL(uint32_t, u32Tmp);
4050 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4051 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4052 }
4053 IEM_MC_ADVANCE_RIP();
4054 IEM_MC_END();
4055 }
4056 return VINF_SUCCESS;
4057}
4058
4059/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
4060FNIEMOP_STUB(iemOp_movq_Vq_Wq);
4061/* Opcode 0xf2 0x0f 0x7e - invalid */
4062
4063
4064/** Opcode 0x0f 0x7f - movq Qq, Pq */
4065FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4066{
4067 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4069 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4070 {
4071 /*
4072 * Register, register.
4073 */
4074 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4075 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4077 IEM_MC_BEGIN(0, 1);
4078 IEM_MC_LOCAL(uint64_t, u64Tmp);
4079 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4080 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4081 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4082 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4083 IEM_MC_ADVANCE_RIP();
4084 IEM_MC_END();
4085 }
4086 else
4087 {
4088 /*
4089 * Memory, register.
4090 */
4091 IEM_MC_BEGIN(0, 2);
4092 IEM_MC_LOCAL(uint64_t, u64Tmp);
4093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4094
4095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4097 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4098 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4099
4100 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4101 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4102
4103 IEM_MC_ADVANCE_RIP();
4104 IEM_MC_END();
4105 }
4106 return VINF_SUCCESS;
4107}
4108
4109/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4110FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4111{
4112 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4114 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4115 {
4116 /*
4117 * Register, register.
4118 */
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120 IEM_MC_BEGIN(0, 0);
4121 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4122 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4123 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4124 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4125 IEM_MC_ADVANCE_RIP();
4126 IEM_MC_END();
4127 }
4128 else
4129 {
4130 /*
4131 * Memory, register.
4132 */
4133 IEM_MC_BEGIN(0, 2);
4134 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4136
4137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4139 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4140 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4141
4142 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4143 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4144
4145 IEM_MC_ADVANCE_RIP();
4146 IEM_MC_END();
4147 }
4148 return VINF_SUCCESS;
4149}
4150
4151/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4152FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4153{
4154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4155 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4156 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4157 {
4158 /*
4159 * Register, register.
4160 */
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_BEGIN(0, 0);
4163 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4164 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4165 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4166 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4167 IEM_MC_ADVANCE_RIP();
4168 IEM_MC_END();
4169 }
4170 else
4171 {
4172 /*
4173 * Memory, register.
4174 */
4175 IEM_MC_BEGIN(0, 2);
4176 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4178
4179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4181 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4182 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4183
4184 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4185 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4186
4187 IEM_MC_ADVANCE_RIP();
4188 IEM_MC_END();
4189 }
4190 return VINF_SUCCESS;
4191}
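
/*
 * Note: the only difference between the movdqa and movdqu stores above is the
 * alignment check: movdqa goes through IEM_MC_STORE_MEM_U128_ALIGN_SSE and
 * thus raises #GP(0) on a misaligned 16-byte operand, while movdqu uses the
 * unaligned IEM_MC_STORE_MEM_U128. The register-to-register forms are
 * identical.
 */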
4192
4193/* Opcode 0xf2 0x0f 0x7f - invalid */
4194
4195
4196
4197/** Opcode 0x0f 0x80. */
4198FNIEMOP_DEF(iemOp_jo_Jv)
4199{
4200 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4201 IEMOP_HLP_MIN_386();
4202 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4203 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4204 {
4205 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4207
4208 IEM_MC_BEGIN(0, 0);
4209 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4210 IEM_MC_REL_JMP_S16(i16Imm);
4211 } IEM_MC_ELSE() {
4212 IEM_MC_ADVANCE_RIP();
4213 } IEM_MC_ENDIF();
4214 IEM_MC_END();
4215 }
4216 else
4217 {
4218 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4220
4221 IEM_MC_BEGIN(0, 0);
4222 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4223 IEM_MC_REL_JMP_S32(i32Imm);
4224 } IEM_MC_ELSE() {
4225 IEM_MC_ADVANCE_RIP();
4226 } IEM_MC_ENDIF();
4227 IEM_MC_END();
4228 }
4229 return VINF_SUCCESS;
4230}
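
/*
 * Note: the Jcc Jv handlers below all follow the template above: the
 * displacement is rel16 with a 16-bit operand size and rel32 sign-extended
 * onto the instruction pointer otherwise (IEM_MC_REL_JMP_S32);
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE makes 64-bit the default operand size in
 * long mode, so the rel32 form is the one normally taken there.
 */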
4231
4232
4233/** Opcode 0x0f 0x81. */
4234FNIEMOP_DEF(iemOp_jno_Jv)
4235{
4236 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4237 IEMOP_HLP_MIN_386();
4238 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4239 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4240 {
4241 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4243
4244 IEM_MC_BEGIN(0, 0);
4245 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4246 IEM_MC_ADVANCE_RIP();
4247 } IEM_MC_ELSE() {
4248 IEM_MC_REL_JMP_S16(i16Imm);
4249 } IEM_MC_ENDIF();
4250 IEM_MC_END();
4251 }
4252 else
4253 {
4254 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4256
4257 IEM_MC_BEGIN(0, 0);
4258 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4259 IEM_MC_ADVANCE_RIP();
4260 } IEM_MC_ELSE() {
4261 IEM_MC_REL_JMP_S32(i32Imm);
4262 } IEM_MC_ENDIF();
4263 IEM_MC_END();
4264 }
4265 return VINF_SUCCESS;
4266}
4267
4268
4269/** Opcode 0x0f 0x82. */
4270FNIEMOP_DEF(iemOp_jc_Jv)
4271{
4272 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4273 IEMOP_HLP_MIN_386();
4274 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4275 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4276 {
4277 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4279
4280 IEM_MC_BEGIN(0, 0);
4281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4282 IEM_MC_REL_JMP_S16(i16Imm);
4283 } IEM_MC_ELSE() {
4284 IEM_MC_ADVANCE_RIP();
4285 } IEM_MC_ENDIF();
4286 IEM_MC_END();
4287 }
4288 else
4289 {
4290 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4292
4293 IEM_MC_BEGIN(0, 0);
4294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4295 IEM_MC_REL_JMP_S32(i32Imm);
4296 } IEM_MC_ELSE() {
4297 IEM_MC_ADVANCE_RIP();
4298 } IEM_MC_ENDIF();
4299 IEM_MC_END();
4300 }
4301 return VINF_SUCCESS;
4302}
4303
4304
4305/** Opcode 0x0f 0x83. */
4306FNIEMOP_DEF(iemOp_jnc_Jv)
4307{
4308 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4309 IEMOP_HLP_MIN_386();
4310 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4311 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4312 {
4313 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4315
4316 IEM_MC_BEGIN(0, 0);
4317 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4318 IEM_MC_ADVANCE_RIP();
4319 } IEM_MC_ELSE() {
4320 IEM_MC_REL_JMP_S16(i16Imm);
4321 } IEM_MC_ENDIF();
4322 IEM_MC_END();
4323 }
4324 else
4325 {
4326 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4328
4329 IEM_MC_BEGIN(0, 0);
4330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4331 IEM_MC_ADVANCE_RIP();
4332 } IEM_MC_ELSE() {
4333 IEM_MC_REL_JMP_S32(i32Imm);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_END();
4336 }
4337 return VINF_SUCCESS;
4338}
4339
4340
4341/** Opcode 0x0f 0x84. */
4342FNIEMOP_DEF(iemOp_je_Jv)
4343{
4344 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4345 IEMOP_HLP_MIN_386();
4346 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4347 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4348 {
4349 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4351
4352 IEM_MC_BEGIN(0, 0);
4353 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4354 IEM_MC_REL_JMP_S16(i16Imm);
4355 } IEM_MC_ELSE() {
4356 IEM_MC_ADVANCE_RIP();
4357 } IEM_MC_ENDIF();
4358 IEM_MC_END();
4359 }
4360 else
4361 {
4362 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4364
4365 IEM_MC_BEGIN(0, 0);
4366 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4367 IEM_MC_REL_JMP_S32(i32Imm);
4368 } IEM_MC_ELSE() {
4369 IEM_MC_ADVANCE_RIP();
4370 } IEM_MC_ENDIF();
4371 IEM_MC_END();
4372 }
4373 return VINF_SUCCESS;
4374}
4375
4376
4377/** Opcode 0x0f 0x85. */
4378FNIEMOP_DEF(iemOp_jne_Jv)
4379{
4380 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4381 IEMOP_HLP_MIN_386();
4382 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4383 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4384 {
4385 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4387
4388 IEM_MC_BEGIN(0, 0);
4389 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4390 IEM_MC_ADVANCE_RIP();
4391 } IEM_MC_ELSE() {
4392 IEM_MC_REL_JMP_S16(i16Imm);
4393 } IEM_MC_ENDIF();
4394 IEM_MC_END();
4395 }
4396 else
4397 {
4398 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4400
4401 IEM_MC_BEGIN(0, 0);
4402 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4403 IEM_MC_ADVANCE_RIP();
4404 } IEM_MC_ELSE() {
4405 IEM_MC_REL_JMP_S32(i32Imm);
4406 } IEM_MC_ENDIF();
4407 IEM_MC_END();
4408 }
4409 return VINF_SUCCESS;
4410}
4411
4412
4413/** Opcode 0x0f 0x86. */
4414FNIEMOP_DEF(iemOp_jbe_Jv)
4415{
4416 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4417 IEMOP_HLP_MIN_386();
4418 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4419 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4420 {
4421 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4423
4424 IEM_MC_BEGIN(0, 0);
4425 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4426 IEM_MC_REL_JMP_S16(i16Imm);
4427 } IEM_MC_ELSE() {
4428 IEM_MC_ADVANCE_RIP();
4429 } IEM_MC_ENDIF();
4430 IEM_MC_END();
4431 }
4432 else
4433 {
4434 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4436
4437 IEM_MC_BEGIN(0, 0);
4438 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4439 IEM_MC_REL_JMP_S32(i32Imm);
4440 } IEM_MC_ELSE() {
4441 IEM_MC_ADVANCE_RIP();
4442 } IEM_MC_ENDIF();
4443 IEM_MC_END();
4444 }
4445 return VINF_SUCCESS;
4446}
4447
4448
4449/** Opcode 0x0f 0x87. */
4450FNIEMOP_DEF(iemOp_jnbe_Jv)
4451{
4452 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4453 IEMOP_HLP_MIN_386();
4454 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4455 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4456 {
4457 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4459
4460 IEM_MC_BEGIN(0, 0);
4461 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4462 IEM_MC_ADVANCE_RIP();
4463 } IEM_MC_ELSE() {
4464 IEM_MC_REL_JMP_S16(i16Imm);
4465 } IEM_MC_ENDIF();
4466 IEM_MC_END();
4467 }
4468 else
4469 {
4470 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4472
4473 IEM_MC_BEGIN(0, 0);
4474 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4475 IEM_MC_ADVANCE_RIP();
4476 } IEM_MC_ELSE() {
4477 IEM_MC_REL_JMP_S32(i32Imm);
4478 } IEM_MC_ENDIF();
4479 IEM_MC_END();
4480 }
4481 return VINF_SUCCESS;
4482}
4483
4484
4485/** Opcode 0x0f 0x88. */
4486FNIEMOP_DEF(iemOp_js_Jv)
4487{
4488 IEMOP_MNEMONIC(js_Jv, "js Jv");
4489 IEMOP_HLP_MIN_386();
4490 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4491 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4492 {
4493 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4495
4496 IEM_MC_BEGIN(0, 0);
4497 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4498 IEM_MC_REL_JMP_S16(i16Imm);
4499 } IEM_MC_ELSE() {
4500 IEM_MC_ADVANCE_RIP();
4501 } IEM_MC_ENDIF();
4502 IEM_MC_END();
4503 }
4504 else
4505 {
4506 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4508
4509 IEM_MC_BEGIN(0, 0);
4510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4511 IEM_MC_REL_JMP_S32(i32Imm);
4512 } IEM_MC_ELSE() {
4513 IEM_MC_ADVANCE_RIP();
4514 } IEM_MC_ENDIF();
4515 IEM_MC_END();
4516 }
4517 return VINF_SUCCESS;
4518}
4519
4520
4521/** Opcode 0x0f 0x89. */
4522FNIEMOP_DEF(iemOp_jns_Jv)
4523{
4524 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4525 IEMOP_HLP_MIN_386();
4526 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4527 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4528 {
4529 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4531
4532 IEM_MC_BEGIN(0, 0);
4533 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4534 IEM_MC_ADVANCE_RIP();
4535 } IEM_MC_ELSE() {
4536 IEM_MC_REL_JMP_S16(i16Imm);
4537 } IEM_MC_ENDIF();
4538 IEM_MC_END();
4539 }
4540 else
4541 {
4542 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4544
4545 IEM_MC_BEGIN(0, 0);
4546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4547 IEM_MC_ADVANCE_RIP();
4548 } IEM_MC_ELSE() {
4549 IEM_MC_REL_JMP_S32(i32Imm);
4550 } IEM_MC_ENDIF();
4551 IEM_MC_END();
4552 }
4553 return VINF_SUCCESS;
4554}
4555
4556
4557/** Opcode 0x0f 0x8a. */
4558FNIEMOP_DEF(iemOp_jp_Jv)
4559{
4560 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4561 IEMOP_HLP_MIN_386();
4562 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4563 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4564 {
4565 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4567
4568 IEM_MC_BEGIN(0, 0);
4569 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4570 IEM_MC_REL_JMP_S16(i16Imm);
4571 } IEM_MC_ELSE() {
4572 IEM_MC_ADVANCE_RIP();
4573 } IEM_MC_ENDIF();
4574 IEM_MC_END();
4575 }
4576 else
4577 {
4578 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4580
4581 IEM_MC_BEGIN(0, 0);
4582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4583 IEM_MC_REL_JMP_S32(i32Imm);
4584 } IEM_MC_ELSE() {
4585 IEM_MC_ADVANCE_RIP();
4586 } IEM_MC_ENDIF();
4587 IEM_MC_END();
4588 }
4589 return VINF_SUCCESS;
4590}
4591
4592
4593/** Opcode 0x0f 0x8b. */
4594FNIEMOP_DEF(iemOp_jnp_Jv)
4595{
4596 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4597 IEMOP_HLP_MIN_386();
4598 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4599 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4600 {
4601 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4603
4604 IEM_MC_BEGIN(0, 0);
4605 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4606 IEM_MC_ADVANCE_RIP();
4607 } IEM_MC_ELSE() {
4608 IEM_MC_REL_JMP_S16(i16Imm);
4609 } IEM_MC_ENDIF();
4610 IEM_MC_END();
4611 }
4612 else
4613 {
4614 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4616
4617 IEM_MC_BEGIN(0, 0);
4618 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4619 IEM_MC_ADVANCE_RIP();
4620 } IEM_MC_ELSE() {
4621 IEM_MC_REL_JMP_S32(i32Imm);
4622 } IEM_MC_ENDIF();
4623 IEM_MC_END();
4624 }
4625 return VINF_SUCCESS;
4626}
4627
4628
4629/** Opcode 0x0f 0x8c. */
4630FNIEMOP_DEF(iemOp_jl_Jv)
4631{
4632 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4633 IEMOP_HLP_MIN_386();
4634 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4635 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4636 {
4637 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4639
4640 IEM_MC_BEGIN(0, 0);
4641 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4642 IEM_MC_REL_JMP_S16(i16Imm);
4643 } IEM_MC_ELSE() {
4644 IEM_MC_ADVANCE_RIP();
4645 } IEM_MC_ENDIF();
4646 IEM_MC_END();
4647 }
4648 else
4649 {
4650 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4652
4653 IEM_MC_BEGIN(0, 0);
4654 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4655 IEM_MC_REL_JMP_S32(i32Imm);
4656 } IEM_MC_ELSE() {
4657 IEM_MC_ADVANCE_RIP();
4658 } IEM_MC_ENDIF();
4659 IEM_MC_END();
4660 }
4661 return VINF_SUCCESS;
4662}
4663
4664
4665/** Opcode 0x0f 0x8d. */
4666FNIEMOP_DEF(iemOp_jnl_Jv)
4667{
4668 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4669 IEMOP_HLP_MIN_386();
4670 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4671 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4672 {
4673 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4675
4676 IEM_MC_BEGIN(0, 0);
4677 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4678 IEM_MC_ADVANCE_RIP();
4679 } IEM_MC_ELSE() {
4680 IEM_MC_REL_JMP_S16(i16Imm);
4681 } IEM_MC_ENDIF();
4682 IEM_MC_END();
4683 }
4684 else
4685 {
4686 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4688
4689 IEM_MC_BEGIN(0, 0);
4690 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4691 IEM_MC_ADVANCE_RIP();
4692 } IEM_MC_ELSE() {
4693 IEM_MC_REL_JMP_S32(i32Imm);
4694 } IEM_MC_ENDIF();
4695 IEM_MC_END();
4696 }
4697 return VINF_SUCCESS;
4698}
4699
4700
4701/** Opcode 0x0f 0x8e. */
4702FNIEMOP_DEF(iemOp_jle_Jv)
4703{
4704 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4705 IEMOP_HLP_MIN_386();
4706 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4707 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4708 {
4709 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4711
4712 IEM_MC_BEGIN(0, 0);
4713 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4714 IEM_MC_REL_JMP_S16(i16Imm);
4715 } IEM_MC_ELSE() {
4716 IEM_MC_ADVANCE_RIP();
4717 } IEM_MC_ENDIF();
4718 IEM_MC_END();
4719 }
4720 else
4721 {
4722 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4724
4725 IEM_MC_BEGIN(0, 0);
4726 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4727 IEM_MC_REL_JMP_S32(i32Imm);
4728 } IEM_MC_ELSE() {
4729 IEM_MC_ADVANCE_RIP();
4730 } IEM_MC_ENDIF();
4731 IEM_MC_END();
4732 }
4733 return VINF_SUCCESS;
4734}
4735
4736
4737/** Opcode 0x0f 0x8f. */
4738FNIEMOP_DEF(iemOp_jnle_Jv)
4739{
4740 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4741 IEMOP_HLP_MIN_386();
4742 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4743 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4744 {
4745 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4747
4748 IEM_MC_BEGIN(0, 0);
4749 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4750 IEM_MC_ADVANCE_RIP();
4751 } IEM_MC_ELSE() {
4752 IEM_MC_REL_JMP_S16(i16Imm);
4753 } IEM_MC_ENDIF();
4754 IEM_MC_END();
4755 }
4756 else
4757 {
4758 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4760
4761 IEM_MC_BEGIN(0, 0);
4762 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4763 IEM_MC_ADVANCE_RIP();
4764 } IEM_MC_ELSE() {
4765 IEM_MC_REL_JMP_S32(i32Imm);
4766 } IEM_MC_ENDIF();
4767 IEM_MC_END();
4768 }
4769 return VINF_SUCCESS;
4770}
4771
4772
4773/** Opcode 0x0f 0x90. */
4774FNIEMOP_DEF(iemOp_seto_Eb)
4775{
4776 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4777 IEMOP_HLP_MIN_386();
4778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4779
4780 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4781 * any way. AMD says it's "unused", whatever that means. We're
4782 * ignoring for now. */
4783 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4784 {
4785 /* register target */
4786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4787 IEM_MC_BEGIN(0, 0);
4788 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4789 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4790 } IEM_MC_ELSE() {
4791 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4792 } IEM_MC_ENDIF();
4793 IEM_MC_ADVANCE_RIP();
4794 IEM_MC_END();
4795 }
4796 else
4797 {
4798 /* memory target */
4799 IEM_MC_BEGIN(0, 1);
4800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4803 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4804 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4805 } IEM_MC_ELSE() {
4806 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4807 } IEM_MC_ENDIF();
4808 IEM_MC_ADVANCE_RIP();
4809 IEM_MC_END();
4810 }
4811 return VINF_SUCCESS;
4812}
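
/*
 * Note: the SETcc Eb handlers below all follow the template above, storing
 * 1 or 0 according to their EFLAGS condition. For instance, with AL=2,
 * 'add al, 0xff' sets CF (2 + 255 = 0x101), so a following 'setc dl' stores
 * 1 in DL.
 */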
4813
4814
4815/** Opcode 0x0f 0x91. */
4816FNIEMOP_DEF(iemOp_setno_Eb)
4817{
4818 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4819 IEMOP_HLP_MIN_386();
4820 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4821
4822 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4823 * any way. AMD says it's "unused", whatever that means. We're
4824 * ignoring for now. */
4825 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4826 {
4827 /* register target */
4828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4829 IEM_MC_BEGIN(0, 0);
4830 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4831 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4832 } IEM_MC_ELSE() {
4833 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4834 } IEM_MC_ENDIF();
4835 IEM_MC_ADVANCE_RIP();
4836 IEM_MC_END();
4837 }
4838 else
4839 {
4840 /* memory target */
4841 IEM_MC_BEGIN(0, 1);
4842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4845 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4846 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4847 } IEM_MC_ELSE() {
4848 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4849 } IEM_MC_ENDIF();
4850 IEM_MC_ADVANCE_RIP();
4851 IEM_MC_END();
4852 }
4853 return VINF_SUCCESS;
4854}
4855
4856
4857/** Opcode 0x0f 0x92. */
4858FNIEMOP_DEF(iemOp_setc_Eb)
4859{
4860 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4861 IEMOP_HLP_MIN_386();
4862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4863
4864 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4865 * any way. AMD says it's "unused", whatever that means. We're
4866 * ignoring for now. */
4867 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4868 {
4869 /* register target */
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_BEGIN(0, 0);
4872 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4873 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4874 } IEM_MC_ELSE() {
4875 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4876 } IEM_MC_ENDIF();
4877 IEM_MC_ADVANCE_RIP();
4878 IEM_MC_END();
4879 }
4880 else
4881 {
4882 /* memory target */
4883 IEM_MC_BEGIN(0, 1);
4884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4887 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4888 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4889 } IEM_MC_ELSE() {
4890 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4891 } IEM_MC_ENDIF();
4892 IEM_MC_ADVANCE_RIP();
4893 IEM_MC_END();
4894 }
4895 return VINF_SUCCESS;
4896}
4897
4898
4899/** Opcode 0x0f 0x93. */
4900FNIEMOP_DEF(iemOp_setnc_Eb)
4901{
4902 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4903 IEMOP_HLP_MIN_386();
4904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4905
4906 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4907 * any way. AMD says it's "unused", whatever that means. We're
4908 * ignoring for now. */
4909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4910 {
4911 /* register target */
4912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4913 IEM_MC_BEGIN(0, 0);
4914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4915 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4916 } IEM_MC_ELSE() {
4917 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4918 } IEM_MC_ENDIF();
4919 IEM_MC_ADVANCE_RIP();
4920 IEM_MC_END();
4921 }
4922 else
4923 {
4924 /* memory target */
4925 IEM_MC_BEGIN(0, 1);
4926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4930 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4931 } IEM_MC_ELSE() {
4932 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4933 } IEM_MC_ENDIF();
4934 IEM_MC_ADVANCE_RIP();
4935 IEM_MC_END();
4936 }
4937 return VINF_SUCCESS;
4938}
4939
4940
4941/** Opcode 0x0f 0x94. */
4942FNIEMOP_DEF(iemOp_sete_Eb)
4943{
4944 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4945 IEMOP_HLP_MIN_386();
4946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4947
4948 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4949 * any way. AMD says it's "unused", whatever that means. We're
4950 * ignoring for now. */
4951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4952 {
4953 /* register target */
4954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4955 IEM_MC_BEGIN(0, 0);
4956 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4957 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4958 } IEM_MC_ELSE() {
4959 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4960 } IEM_MC_ENDIF();
4961 IEM_MC_ADVANCE_RIP();
4962 IEM_MC_END();
4963 }
4964 else
4965 {
4966 /* memory target */
4967 IEM_MC_BEGIN(0, 1);
4968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4971 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4972 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4973 } IEM_MC_ELSE() {
4974 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4975 } IEM_MC_ENDIF();
4976 IEM_MC_ADVANCE_RIP();
4977 IEM_MC_END();
4978 }
4979 return VINF_SUCCESS;
4980}
4981
4982
4983/** Opcode 0x0f 0x95. */
4984FNIEMOP_DEF(iemOp_setne_Eb)
4985{
4986 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4987 IEMOP_HLP_MIN_386();
4988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4989
4990 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4991 * any way. AMD says it's "unused", whatever that means. We're
4992 * ignoring for now. */
4993 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4994 {
4995 /* register target */
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997 IEM_MC_BEGIN(0, 0);
4998 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4999 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5000 } IEM_MC_ELSE() {
5001 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5002 } IEM_MC_ENDIF();
5003 IEM_MC_ADVANCE_RIP();
5004 IEM_MC_END();
5005 }
5006 else
5007 {
5008 /* memory target */
5009 IEM_MC_BEGIN(0, 1);
5010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5013 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5014 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5015 } IEM_MC_ELSE() {
5016 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5017 } IEM_MC_ENDIF();
5018 IEM_MC_ADVANCE_RIP();
5019 IEM_MC_END();
5020 }
5021 return VINF_SUCCESS;
5022}
5023
5024
5025/** Opcode 0x0f 0x96. */
5026FNIEMOP_DEF(iemOp_setbe_Eb)
5027{
5028 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5029 IEMOP_HLP_MIN_386();
5030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5031
5032 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5033 * any way. AMD says it's "unused", whatever that means. We're
5034 * ignoring for now. */
5035 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5036 {
5037 /* register target */
5038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5039 IEM_MC_BEGIN(0, 0);
5040 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5041 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5042 } IEM_MC_ELSE() {
5043 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5044 } IEM_MC_ENDIF();
5045 IEM_MC_ADVANCE_RIP();
5046 IEM_MC_END();
5047 }
5048 else
5049 {
5050 /* memory target */
5051 IEM_MC_BEGIN(0, 1);
5052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5055 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5056 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5057 } IEM_MC_ELSE() {
5058 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5059 } IEM_MC_ENDIF();
5060 IEM_MC_ADVANCE_RIP();
5061 IEM_MC_END();
5062 }
5063 return VINF_SUCCESS;
5064}
5065
5066
5067/** Opcode 0x0f 0x97. */
5068FNIEMOP_DEF(iemOp_setnbe_Eb)
5069{
5070 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5071 IEMOP_HLP_MIN_386();
5072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5073
5074 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5075 * any way. AMD says it's "unused", whatever that means. We're
5076 * ignoring for now. */
5077 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5078 {
5079 /* register target */
5080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5081 IEM_MC_BEGIN(0, 0);
5082 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5083 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5084 } IEM_MC_ELSE() {
5085 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5086 } IEM_MC_ENDIF();
5087 IEM_MC_ADVANCE_RIP();
5088 IEM_MC_END();
5089 }
5090 else
5091 {
5092 /* memory target */
5093 IEM_MC_BEGIN(0, 1);
5094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5097 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5098 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5099 } IEM_MC_ELSE() {
5100 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5101 } IEM_MC_ENDIF();
5102 IEM_MC_ADVANCE_RIP();
5103 IEM_MC_END();
5104 }
5105 return VINF_SUCCESS;
5106}
5107
5108
5109/** Opcode 0x0f 0x98. */
5110FNIEMOP_DEF(iemOp_sets_Eb)
5111{
5112 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5113 IEMOP_HLP_MIN_386();
5114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5115
5116 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5117 * any way. AMD says it's "unused", whatever that means. We're
5118 * ignoring for now. */
5119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5120 {
5121 /* register target */
5122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5123 IEM_MC_BEGIN(0, 0);
5124 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5125 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5126 } IEM_MC_ELSE() {
5127 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5128 } IEM_MC_ENDIF();
5129 IEM_MC_ADVANCE_RIP();
5130 IEM_MC_END();
5131 }
5132 else
5133 {
5134 /* memory target */
5135 IEM_MC_BEGIN(0, 1);
5136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5139 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5140 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5141 } IEM_MC_ELSE() {
5142 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5143 } IEM_MC_ENDIF();
5144 IEM_MC_ADVANCE_RIP();
5145 IEM_MC_END();
5146 }
5147 return VINF_SUCCESS;
5148}
5149
5150
5151/** Opcode 0x0f 0x99. */
5152FNIEMOP_DEF(iemOp_setns_Eb)
5153{
5154 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5155 IEMOP_HLP_MIN_386();
5156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5157
5158 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5159 * any way. AMD says it's "unused", whatever that means. We're
5160 * ignoring for now. */
5161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5162 {
5163 /* register target */
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165 IEM_MC_BEGIN(0, 0);
5166 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5167 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5168 } IEM_MC_ELSE() {
5169 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5170 } IEM_MC_ENDIF();
5171 IEM_MC_ADVANCE_RIP();
5172 IEM_MC_END();
5173 }
5174 else
5175 {
5176 /* memory target */
5177 IEM_MC_BEGIN(0, 1);
5178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5181 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5182 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5183 } IEM_MC_ELSE() {
5184 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5185 } IEM_MC_ENDIF();
5186 IEM_MC_ADVANCE_RIP();
5187 IEM_MC_END();
5188 }
5189 return VINF_SUCCESS;
5190}
5191
5192
5193/** Opcode 0x0f 0x9a. */
5194FNIEMOP_DEF(iemOp_setp_Eb)
5195{
5196 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5197 IEMOP_HLP_MIN_386();
5198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5199
5200 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5201 * any way. AMD says it's "unused", whatever that means. We're
5202 * ignoring for now. */
5203 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5204 {
5205 /* register target */
5206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5207 IEM_MC_BEGIN(0, 0);
5208 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5209 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5210 } IEM_MC_ELSE() {
5211 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5212 } IEM_MC_ENDIF();
5213 IEM_MC_ADVANCE_RIP();
5214 IEM_MC_END();
5215 }
5216 else
5217 {
5218 /* memory target */
5219 IEM_MC_BEGIN(0, 1);
5220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5223 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5224 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5225 } IEM_MC_ELSE() {
5226 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5227 } IEM_MC_ENDIF();
5228 IEM_MC_ADVANCE_RIP();
5229 IEM_MC_END();
5230 }
5231 return VINF_SUCCESS;
5232}
5233
5234
5235/** Opcode 0x0f 0x9b. */
5236FNIEMOP_DEF(iemOp_setnp_Eb)
5237{
5238 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5239 IEMOP_HLP_MIN_386();
5240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5241
5242 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5243 * any way. AMD says it's "unused", whatever that means. We're
5244 * ignoring for now. */
5245 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5246 {
5247 /* register target */
5248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5249 IEM_MC_BEGIN(0, 0);
5250 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5251 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5252 } IEM_MC_ELSE() {
5253 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5254 } IEM_MC_ENDIF();
5255 IEM_MC_ADVANCE_RIP();
5256 IEM_MC_END();
5257 }
5258 else
5259 {
5260 /* memory target */
5261 IEM_MC_BEGIN(0, 1);
5262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5265 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5266 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5267 } IEM_MC_ELSE() {
5268 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5269 } IEM_MC_ENDIF();
5270 IEM_MC_ADVANCE_RIP();
5271 IEM_MC_END();
5272 }
5273 return VINF_SUCCESS;
5274}
5275
5276
5277/** Opcode 0x0f 0x9c. */
5278FNIEMOP_DEF(iemOp_setl_Eb)
5279{
5280 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5281 IEMOP_HLP_MIN_386();
5282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5283
5284 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5285 * any way. AMD says it's "unused", whatever that means. We're
5286 * ignoring for now. */
5287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5288 {
5289 /* register target */
5290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5291 IEM_MC_BEGIN(0, 0);
5292 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5293 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5294 } IEM_MC_ELSE() {
5295 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5296 } IEM_MC_ENDIF();
5297 IEM_MC_ADVANCE_RIP();
5298 IEM_MC_END();
5299 }
5300 else
5301 {
5302 /* memory target */
5303 IEM_MC_BEGIN(0, 1);
5304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5307 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5308 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5309 } IEM_MC_ELSE() {
5310 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5311 } IEM_MC_ENDIF();
5312 IEM_MC_ADVANCE_RIP();
5313 IEM_MC_END();
5314 }
5315 return VINF_SUCCESS;
5316}
5317
5318
5319/** Opcode 0x0f 0x9d. */
5320FNIEMOP_DEF(iemOp_setnl_Eb)
5321{
5322 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5323 IEMOP_HLP_MIN_386();
5324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5325
5326 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5327 * any way. AMD says it's "unused", whatever that means. We're
5328 * ignoring for now. */
5329 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5330 {
5331 /* register target */
5332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5333 IEM_MC_BEGIN(0, 0);
5334 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5335 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5336 } IEM_MC_ELSE() {
5337 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5338 } IEM_MC_ENDIF();
5339 IEM_MC_ADVANCE_RIP();
5340 IEM_MC_END();
5341 }
5342 else
5343 {
5344 /* memory target */
5345 IEM_MC_BEGIN(0, 1);
5346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5349 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5350 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5351 } IEM_MC_ELSE() {
5352 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5353 } IEM_MC_ENDIF();
5354 IEM_MC_ADVANCE_RIP();
5355 IEM_MC_END();
5356 }
5357 return VINF_SUCCESS;
5358}
5359
5360
5361/** Opcode 0x0f 0x9e. */
5362FNIEMOP_DEF(iemOp_setle_Eb)
5363{
5364 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5365 IEMOP_HLP_MIN_386();
5366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5367
5368 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5369 * any way. AMD says it's "unused", whatever that means. We're
5370 * ignoring for now. */
5371 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5372 {
5373 /* register target */
5374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5375 IEM_MC_BEGIN(0, 0);
5376 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5377 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5378 } IEM_MC_ELSE() {
5379 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5380 } IEM_MC_ENDIF();
5381 IEM_MC_ADVANCE_RIP();
5382 IEM_MC_END();
5383 }
5384 else
5385 {
5386 /* memory target */
5387 IEM_MC_BEGIN(0, 1);
5388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5391 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5392 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5393 } IEM_MC_ELSE() {
5394 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5395 } IEM_MC_ENDIF();
5396 IEM_MC_ADVANCE_RIP();
5397 IEM_MC_END();
5398 }
5399 return VINF_SUCCESS;
5400}
5401
5402
5403/** Opcode 0x0f 0x9f. */
5404FNIEMOP_DEF(iemOp_setnle_Eb)
5405{
5406 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5407 IEMOP_HLP_MIN_386();
5408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5409
5410 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5411 * any way. AMD says it's "unused", whatever that means. We're
5412 * ignoring for now. */
5413 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5414 {
5415 /* register target */
5416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5417 IEM_MC_BEGIN(0, 0);
5418 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5419 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5420 } IEM_MC_ELSE() {
5421 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5422 } IEM_MC_ENDIF();
5423 IEM_MC_ADVANCE_RIP();
5424 IEM_MC_END();
5425 }
5426 else
5427 {
5428 /* memory target */
5429 IEM_MC_BEGIN(0, 1);
5430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5433 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5434 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5435 } IEM_MC_ELSE() {
5436 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5437 } IEM_MC_ENDIF();
5438 IEM_MC_ADVANCE_RIP();
5439 IEM_MC_END();
5440 }
5441 return VINF_SUCCESS;
5442}
5443
5444
5445/**
5446 * Common 'push segment-register' helper.
5447 */
5448FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5449{
5450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5451 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only fs and gs can be pushed in 64-bit mode. */
5452 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5453
5454 switch (pVCpu->iem.s.enmEffOpSize)
5455 {
5456 case IEMMODE_16BIT:
5457 IEM_MC_BEGIN(0, 1);
5458 IEM_MC_LOCAL(uint16_t, u16Value);
5459 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5460 IEM_MC_PUSH_U16(u16Value);
5461 IEM_MC_ADVANCE_RIP();
5462 IEM_MC_END();
5463 break;
5464
5465 case IEMMODE_32BIT:
5466 IEM_MC_BEGIN(0, 1);
5467 IEM_MC_LOCAL(uint32_t, u32Value);
5468 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
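                /* A dedicated MC is used for the 32-bit case: newer CPUs do not
                   simply zero extend here but write only the low 16 bits of the
                   stack slot, leaving the high half untouched. */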
5469 IEM_MC_PUSH_U32_SREG(u32Value);
5470 IEM_MC_ADVANCE_RIP();
5471 IEM_MC_END();
5472 break;
5473
5474 case IEMMODE_64BIT:
5475 IEM_MC_BEGIN(0, 1);
5476 IEM_MC_LOCAL(uint64_t, u64Value);
5477 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5478 IEM_MC_PUSH_U64(u64Value);
5479 IEM_MC_ADVANCE_RIP();
5480 IEM_MC_END();
5481 break;
5482 }
5483
5484 return VINF_SUCCESS;
5485}
5486
5487
5488/** Opcode 0x0f 0xa0. */
5489FNIEMOP_DEF(iemOp_push_fs)
5490{
5491 IEMOP_MNEMONIC(push_fs, "push fs");
5492 IEMOP_HLP_MIN_386();
5493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5494 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5495}
5496
5497
5498/** Opcode 0x0f 0xa1. */
5499FNIEMOP_DEF(iemOp_pop_fs)
5500{
5501 IEMOP_MNEMONIC(pop_fs, "pop fs");
5502 IEMOP_HLP_MIN_386();
5503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5504 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5505}
5506
5507
5508/** Opcode 0x0f 0xa2. */
5509FNIEMOP_DEF(iemOp_cpuid)
5510{
5511 IEMOP_MNEMONIC(cpuid, "cpuid");
5512 IEMOP_HLP_MIN_486(); /* not all 486es. */
5513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5514 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5515}
5516
5517
5518/**
5519 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5520 * iemOp_bts_Ev_Gv.
5521 */
5522FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5523{
5524 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5525 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5526
5527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5528 {
5529 /* register destination. */
5530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5531 switch (pVCpu->iem.s.enmEffOpSize)
5532 {
5533 case IEMMODE_16BIT:
5534 IEM_MC_BEGIN(3, 0);
5535 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5536 IEM_MC_ARG(uint16_t, u16Src, 1);
5537 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5538
5539 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5540 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5541 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5542 IEM_MC_REF_EFLAGS(pEFlags);
5543 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5544
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 return VINF_SUCCESS;
5548
5549 case IEMMODE_32BIT:
5550 IEM_MC_BEGIN(3, 0);
5551 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5552 IEM_MC_ARG(uint32_t, u32Src, 1);
5553 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5554
5555 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5556 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5557 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5558 IEM_MC_REF_EFLAGS(pEFlags);
5559 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5560
5561 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5562 IEM_MC_ADVANCE_RIP();
5563 IEM_MC_END();
5564 return VINF_SUCCESS;
5565
5566 case IEMMODE_64BIT:
5567 IEM_MC_BEGIN(3, 0);
5568 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5569 IEM_MC_ARG(uint64_t, u64Src, 1);
5570 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5571
5572 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5573 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5574 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5575 IEM_MC_REF_EFLAGS(pEFlags);
5576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5577
5578 IEM_MC_ADVANCE_RIP();
5579 IEM_MC_END();
5580 return VINF_SUCCESS;
5581
5582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5583 }
5584 }
5585 else
5586 {
5587 /* memory destination. */
5588
5589 uint32_t fAccess;
5590 if (pImpl->pfnLockedU16)
5591 fAccess = IEM_ACCESS_DATA_RW;
5592 else /* BT */
5593 fAccess = IEM_ACCESS_DATA_R;
5594
5595 /** @todo test negative bit offsets! */
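        /* The bit offset in Gv is signed and may address bits outside the
           operand: below it is divided by the operand width (SAR) and scaled
           to bytes (SHL) to adjust the effective address, while the remaining
           low bits select the bit within the addressed unit. */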
5596 switch (pVCpu->iem.s.enmEffOpSize)
5597 {
5598 case IEMMODE_16BIT:
5599 IEM_MC_BEGIN(3, 2);
5600 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5601 IEM_MC_ARG(uint16_t, u16Src, 1);
5602 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5604 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5605
5606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5607 if (pImpl->pfnLockedU16)
5608 IEMOP_HLP_DONE_DECODING();
5609 else
5610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5611 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5612 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5613 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5614 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5615 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5616 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5617 IEM_MC_FETCH_EFLAGS(EFlags);
5618
5619 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5620 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5621 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5622 else
5623 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5624 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5625
5626 IEM_MC_COMMIT_EFLAGS(EFlags);
5627 IEM_MC_ADVANCE_RIP();
5628 IEM_MC_END();
5629 return VINF_SUCCESS;
5630
5631 case IEMMODE_32BIT:
5632 IEM_MC_BEGIN(3, 2);
5633 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5634 IEM_MC_ARG(uint32_t, u32Src, 1);
5635 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5637 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5638
5639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5640 if (pImpl->pfnLockedU16)
5641 IEMOP_HLP_DONE_DECODING();
5642 else
5643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5644 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5645 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5646 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5647 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5648 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5649 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5650 IEM_MC_FETCH_EFLAGS(EFlags);
5651
5652 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5653 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5654 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5655 else
5656 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5657 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5658
5659 IEM_MC_COMMIT_EFLAGS(EFlags);
5660 IEM_MC_ADVANCE_RIP();
5661 IEM_MC_END();
5662 return VINF_SUCCESS;
5663
5664 case IEMMODE_64BIT:
5665 IEM_MC_BEGIN(3, 2);
5666 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5667 IEM_MC_ARG(uint64_t, u64Src, 1);
5668 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5670 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5671
5672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5673 if (pImpl->pfnLockedU16)
5674 IEMOP_HLP_DONE_DECODING();
5675 else
5676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5677 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5678 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5679 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5680 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5681 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5682 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5683 IEM_MC_FETCH_EFLAGS(EFlags);
5684
5685 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5686 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5687 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5688 else
5689 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5690 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5691
5692 IEM_MC_COMMIT_EFLAGS(EFlags);
5693 IEM_MC_ADVANCE_RIP();
5694 IEM_MC_END();
5695 return VINF_SUCCESS;
5696
5697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5698 }
5699 }
5700}
5701
5702
5703/** Opcode 0x0f 0xa3. */
5704FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5705{
5706 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5707 IEMOP_HLP_MIN_386();
5708 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5709}
5710
5711
5712/**
5713 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5714 */
5715FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5716{
5717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5718 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5719
5720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5721 {
5722 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5724
5725 switch (pVCpu->iem.s.enmEffOpSize)
5726 {
5727 case IEMMODE_16BIT:
5728 IEM_MC_BEGIN(4, 0);
5729 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5730 IEM_MC_ARG(uint16_t, u16Src, 1);
5731 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5732 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5733
5734 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5735 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5736 IEM_MC_REF_EFLAGS(pEFlags);
5737 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5738
5739 IEM_MC_ADVANCE_RIP();
5740 IEM_MC_END();
5741 return VINF_SUCCESS;
5742
5743 case IEMMODE_32BIT:
5744 IEM_MC_BEGIN(4, 0);
5745 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5746 IEM_MC_ARG(uint32_t, u32Src, 1);
5747 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5748 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5749
5750 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5751 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5752 IEM_MC_REF_EFLAGS(pEFlags);
5753 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5754
5755 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5756 IEM_MC_ADVANCE_RIP();
5757 IEM_MC_END();
5758 return VINF_SUCCESS;
5759
5760 case IEMMODE_64BIT:
5761 IEM_MC_BEGIN(4, 0);
5762 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5763 IEM_MC_ARG(uint64_t, u64Src, 1);
5764 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5765 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5766
5767 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5768 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5769 IEM_MC_REF_EFLAGS(pEFlags);
5770 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5771
5772 IEM_MC_ADVANCE_RIP();
5773 IEM_MC_END();
5774 return VINF_SUCCESS;
5775
5776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5777 }
5778 }
5779 else
5780 {
5781 switch (pVCpu->iem.s.enmEffOpSize)
5782 {
5783 case IEMMODE_16BIT:
5784 IEM_MC_BEGIN(4, 2);
5785 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5786 IEM_MC_ARG(uint16_t, u16Src, 1);
5787 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5788 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5790
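                /* Note: the last argument to IEM_MC_CALC_RM_EFF_ADDR is the number
                   of immediate bytes following the ModR/M encoding; it is needed
                   to get RIP-relative addressing right. */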
5791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5792 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5793 IEM_MC_ASSIGN(cShiftArg, cShift);
5794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5795 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5796 IEM_MC_FETCH_EFLAGS(EFlags);
5797 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5798 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5799
5800 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5801 IEM_MC_COMMIT_EFLAGS(EFlags);
5802 IEM_MC_ADVANCE_RIP();
5803 IEM_MC_END();
5804 return VINF_SUCCESS;
5805
5806 case IEMMODE_32BIT:
5807 IEM_MC_BEGIN(4, 2);
5808 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5809 IEM_MC_ARG(uint32_t, u32Src, 1);
5810 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5811 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5813
5814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5815 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5816 IEM_MC_ASSIGN(cShiftArg, cShift);
5817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5818 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5819 IEM_MC_FETCH_EFLAGS(EFlags);
5820 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5821 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5822
5823 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5824 IEM_MC_COMMIT_EFLAGS(EFlags);
5825 IEM_MC_ADVANCE_RIP();
5826 IEM_MC_END();
5827 return VINF_SUCCESS;
5828
5829 case IEMMODE_64BIT:
5830 IEM_MC_BEGIN(4, 2);
5831 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5832 IEM_MC_ARG(uint64_t, u64Src, 1);
5833 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5834 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5836
5837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5838 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5839 IEM_MC_ASSIGN(cShiftArg, cShift);
5840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5841 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5842 IEM_MC_FETCH_EFLAGS(EFlags);
5843 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5844 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5845
5846 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5847 IEM_MC_COMMIT_EFLAGS(EFlags);
5848 IEM_MC_ADVANCE_RIP();
5849 IEM_MC_END();
5850 return VINF_SUCCESS;
5851
5852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5853 }
5854 }
5855}
5856
5857
5858/**
5859 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5860 */
5861FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5862{
5863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5864 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5865
5866 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5867 {
5868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5869
5870 switch (pVCpu->iem.s.enmEffOpSize)
5871 {
5872 case IEMMODE_16BIT:
5873 IEM_MC_BEGIN(4, 0);
5874 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5875 IEM_MC_ARG(uint16_t, u16Src, 1);
5876 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5877 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5878
5879 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5880 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5881 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5882 IEM_MC_REF_EFLAGS(pEFlags);
5883 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5884
5885 IEM_MC_ADVANCE_RIP();
5886 IEM_MC_END();
5887 return VINF_SUCCESS;
5888
5889 case IEMMODE_32BIT:
5890 IEM_MC_BEGIN(4, 0);
5891 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5892 IEM_MC_ARG(uint32_t, u32Src, 1);
5893 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5894 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5895
5896 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5897 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5898 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5899 IEM_MC_REF_EFLAGS(pEFlags);
5900 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5901
5902 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5903 IEM_MC_ADVANCE_RIP();
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906
5907 case IEMMODE_64BIT:
5908 IEM_MC_BEGIN(4, 0);
5909 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5910 IEM_MC_ARG(uint64_t, u64Src, 1);
5911 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5912 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5913
5914 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5915 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5916 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5917 IEM_MC_REF_EFLAGS(pEFlags);
5918 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5919
5920 IEM_MC_ADVANCE_RIP();
5921 IEM_MC_END();
5922 return VINF_SUCCESS;
5923
5924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5925 }
5926 }
5927 else
5928 {
5929 switch (pVCpu->iem.s.enmEffOpSize)
5930 {
5931 case IEMMODE_16BIT:
5932 IEM_MC_BEGIN(4, 2);
5933 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5934 IEM_MC_ARG(uint16_t, u16Src, 1);
5935 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5936 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5938
5939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5941 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5942 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5943 IEM_MC_FETCH_EFLAGS(EFlags);
5944 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5945 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5946
5947 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5948 IEM_MC_COMMIT_EFLAGS(EFlags);
5949 IEM_MC_ADVANCE_RIP();
5950 IEM_MC_END();
5951 return VINF_SUCCESS;
5952
5953 case IEMMODE_32BIT:
5954 IEM_MC_BEGIN(4, 2);
5955 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5956 IEM_MC_ARG(uint32_t, u32Src, 1);
5957 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5958 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5960
5961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5963 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5964 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5965 IEM_MC_FETCH_EFLAGS(EFlags);
5966 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5967 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5968
5969 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5970 IEM_MC_COMMIT_EFLAGS(EFlags);
5971 IEM_MC_ADVANCE_RIP();
5972 IEM_MC_END();
5973 return VINF_SUCCESS;
5974
5975 case IEMMODE_64BIT:
5976 IEM_MC_BEGIN(4, 2);
5977 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5978 IEM_MC_ARG(uint64_t, u64Src, 1);
5979 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5980 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5982
5983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5985 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5986 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5987 IEM_MC_FETCH_EFLAGS(EFlags);
5988 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5989 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5990
5991 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5992 IEM_MC_COMMIT_EFLAGS(EFlags);
5993 IEM_MC_ADVANCE_RIP();
5994 IEM_MC_END();
5995 return VINF_SUCCESS;
5996
5997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5998 }
5999 }
6000}
6001
6002
6003
6004/** Opcode 0x0f 0xa4. */
6005FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6006{
6007 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6008 IEMOP_HLP_MIN_386();
6009 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6010}
6011
6012
6013/** Opcode 0x0f 0xa5. */
6014FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6015{
6016 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6017 IEMOP_HLP_MIN_386();
6018 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6019}
6020
6021
6022/** Opcode 0x0f 0xa8. */
6023FNIEMOP_DEF(iemOp_push_gs)
6024{
6025 IEMOP_MNEMONIC(push_gs, "push gs");
6026 IEMOP_HLP_MIN_386();
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6029}
6030
6031
6032/** Opcode 0x0f 0xa9. */
6033FNIEMOP_DEF(iemOp_pop_gs)
6034{
6035 IEMOP_MNEMONIC(pop_gs, "pop gs");
6036 IEMOP_HLP_MIN_386();
6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6038 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6039}
6040
6041
6042/** Opcode 0x0f 0xaa. */
6043FNIEMOP_DEF(iemOp_rsm)
6044{
6045 IEMOP_MNEMONIC(rsm, "rsm");
6046 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6047 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6048 * intercept). */
6049 IEMOP_BITCH_ABOUT_STUB();
6050 return IEMOP_RAISE_INVALID_OPCODE();
6051}
6052
6053//IEMOP_HLP_MIN_386();
6054
6055
6056/** Opcode 0x0f 0xab. */
6057FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6058{
6059 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6060 IEMOP_HLP_MIN_386();
6061 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6062}
6063
6064
6065/** Opcode 0x0f 0xac. */
6066FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6067{
6068 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6069 IEMOP_HLP_MIN_386();
6070 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6071}
6072
6073
6074/** Opcode 0x0f 0xad. */
6075FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6076{
6077 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6078 IEMOP_HLP_MIN_386();
6079 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6080}
6081
6082
6083/** Opcode 0x0f 0xae mem/0. */
6084FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6085{
6086 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6087 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6088 return IEMOP_RAISE_INVALID_OPCODE();
6089
6090 IEM_MC_BEGIN(3, 1);
6091 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6092 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6093 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6096 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6097 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6098 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6099 IEM_MC_END();
6100 return VINF_SUCCESS;
6101}
6102
6103
6104/** Opcode 0x0f 0xae mem/1. */
6105FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6106{
6107 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6108 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6109 return IEMOP_RAISE_INVALID_OPCODE();
6110
6111 IEM_MC_BEGIN(3, 1);
6112 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6113 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6114 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6117 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6118 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6119 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6120 IEM_MC_END();
6121 return VINF_SUCCESS;
6122}
6123
6124
6125/**
6126 * @opmaps grp15
6127 * @opcode !11/2
6128 * @oppfx none
6129 * @opcpuid sse
6130 * @opgroup og_sse_mxcsrsm
6131 * @opxcpttype 5
6132 * @optest op1=0 -> mxcsr=0
6133 * @optest op1=0x2083 -> mxcsr=0x2083
6134 * @optest op1=0xfffffffe -> value.xcpt=0xd
6135 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6136 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6137 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6138 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6139 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6140 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6141 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6142 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6143 */
6144FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6145{
6146 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6147 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6148 return IEMOP_RAISE_INVALID_OPCODE();
6149
6150 IEM_MC_BEGIN(2, 0);
6151 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6152 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6155    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6156 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6157 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6158 IEM_MC_END();
6159 return VINF_SUCCESS;
6160}
6161
6162
6163/**
6164 * @opmaps grp15
6165 * @opcode !11/3
6166 * @oppfx none
6167 * @opcpuid sse
6168 * @opgroup og_sse_mxcsrsm
6169 * @opxcpttype 5
6170 * @optest mxcsr=0 -> op1=0
6171 * @optest mxcsr=0x2083 -> op1=0x2083
6172 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6173 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6174 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6175 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6176 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6177 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6178 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6179 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6180 */
6181FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6182{
6183 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6184 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6185 return IEMOP_RAISE_INVALID_OPCODE();
6186
6187 IEM_MC_BEGIN(2, 0);
6188 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6189 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6192 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6193 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6194 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6195 IEM_MC_END();
6196 return VINF_SUCCESS;
6197}
6198
6199
6200/**
6201 * @opmaps grp15
6202 * @opcode !11/4
6203 * @oppfx none
6204 * @opcpuid xsave
6205 * @opgroup og_system
6206 * @opxcpttype none
6207 */
6208FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6209{
6210 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6211 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6212 return IEMOP_RAISE_INVALID_OPCODE();
6213
6214 IEM_MC_BEGIN(3, 0);
6215 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6216 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6217 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6220 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6221 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6222 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6223 IEM_MC_END();
6224 return VINF_SUCCESS;
6225}
6226
6227
6228/**
6229 * @opmaps grp15
6230 * @opcode !11/5
6231 * @oppfx none
6232 * @opcpuid xsave
6233 * @opgroup og_system
6234 * @opxcpttype none
6235 */
6236FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6237{
6238 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6239 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6240 return IEMOP_RAISE_INVALID_OPCODE();
6241
6242 IEM_MC_BEGIN(3, 0);
6243 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6244 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6245 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6248    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6249 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6250 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6251 IEM_MC_END();
6252 return VINF_SUCCESS;
6253}
6254
6255/** Opcode 0x0f 0xae mem/6. */
6256FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6257
6258/**
6259 * @opmaps grp15
6260 * @opcode !11/7
6261 * @oppfx none
6262 * @opcpuid clfsh
6263 * @opgroup og_cachectl
6264 * @optest op1=1 ->
6265 */
6266FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6267{
6268 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6269 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6270 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6271
6272 IEM_MC_BEGIN(2, 0);
6273 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6274 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6277 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6278 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6279 IEM_MC_END();
6280 return VINF_SUCCESS;
6281}
6282
6283/**
6284 * @opmaps grp15
6285 * @opcode !11/7
6286 * @oppfx 0x66
6287 * @opcpuid clflushopt
6288 * @opgroup og_cachectl
6289 * @optest op1=1 ->
6290 */
6291FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6292{
6293 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6294 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6295 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6296
6297 IEM_MC_BEGIN(2, 0);
6298 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6299 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6303 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6304 IEM_MC_END();
6305 return VINF_SUCCESS;
6306}
6307
6308
6309/** Opcode 0x0f 0xae 11b/5. */
6310FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6311{
6312 RT_NOREF_PV(bRm);
6313 IEMOP_MNEMONIC(lfence, "lfence");
6314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6315 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6316 return IEMOP_RAISE_INVALID_OPCODE();
6317
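    /* The guest may report SSE2 while running on an older host, so fall back
       to the generic memory fence worker when the host lacks SSE2. */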
6318 IEM_MC_BEGIN(0, 0);
6319 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6320 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6321 else
6322 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6323 IEM_MC_ADVANCE_RIP();
6324 IEM_MC_END();
6325 return VINF_SUCCESS;
6326}
6327
6328
6329/** Opcode 0x0f 0xae 11b/6. */
6330FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6331{
6332 RT_NOREF_PV(bRm);
6333 IEMOP_MNEMONIC(mfence, "mfence");
6334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6335 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6336 return IEMOP_RAISE_INVALID_OPCODE();
6337
6338 IEM_MC_BEGIN(0, 0);
6339 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6340 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6341 else
6342 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6343 IEM_MC_ADVANCE_RIP();
6344 IEM_MC_END();
6345 return VINF_SUCCESS;
6346}
6347
6348
6349/** Opcode 0x0f 0xae 11b/7. */
6350FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6351{
6352 RT_NOREF_PV(bRm);
6353 IEMOP_MNEMONIC(sfence, "sfence");
6354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6355 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6356 return IEMOP_RAISE_INVALID_OPCODE();
6357
6358 IEM_MC_BEGIN(0, 0);
6359 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6360 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6361 else
6362 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6363 IEM_MC_ADVANCE_RIP();
6364 IEM_MC_END();
6365 return VINF_SUCCESS;
6366}
6367
6368
6369/** Opcode 0xf3 0x0f 0xae 11b/0. */
6370FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6371
6372/** Opcode 0xf3 0x0f 0xae 11b/1. */
6373FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6374
6375/** Opcode 0xf3 0x0f 0xae 11b/2. */
6376FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6377
6378/** Opcode 0xf3 0x0f 0xae 11b/3. */
6379FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6380
6381
6382/**
6383 * Group 15 jump table for register variant.
6384 */
6385IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6386{ /* pfx: none, 066h, 0f3h, 0f2h */
6387 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6388 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6389 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6390 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6391 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6392 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6393 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6394 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6395};
6396AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6397
6398
6399/**
6400 * Group 15 jump table for memory variant.
6401 */
6402IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6403{ /* pfx: none, 066h, 0f3h, 0f2h */
6404 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6405 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6406 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6407 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6408 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6409 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6410 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6411 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6412};
6413AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6414
6415
6416/** Opcode 0x0f 0xae. */
6417FNIEMOP_DEF(iemOp_Grp15)
6418{
6419    IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
6420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
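    /* The table index is the ModR/M reg field times four plus the mandatory
       prefix index (none, 066h, 0f3h, 0f2h). */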
6421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6422 /* register, register */
6423 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6424 + pVCpu->iem.s.idxPrefix], bRm);
6425 /* memory, register */
6426 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6427 + pVCpu->iem.s.idxPrefix], bRm);
6428}
6429
6430
6431/** Opcode 0x0f 0xaf. */
6432FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6433{
6434 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6435 IEMOP_HLP_MIN_386();
6436 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6437 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6438}
6439
6440
6441/** Opcode 0x0f 0xb0. */
6442FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6443{
6444 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6445 IEMOP_HLP_MIN_486();
6446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
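    /* cmpxchg: if AL equals the destination, ZF is set and the source is
       written to the destination; otherwise ZF is cleared and the destination
       value is loaded into AL. */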
6447
6448 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6449 {
6450 IEMOP_HLP_DONE_DECODING();
6451 IEM_MC_BEGIN(4, 0);
6452 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6453 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6454 IEM_MC_ARG(uint8_t, u8Src, 2);
6455 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6456
6457 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6458 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6459 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6460 IEM_MC_REF_EFLAGS(pEFlags);
6461 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6462 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6463 else
6464 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6465
6466 IEM_MC_ADVANCE_RIP();
6467 IEM_MC_END();
6468 }
6469 else
6470 {
6471 IEM_MC_BEGIN(4, 3);
6472 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6473 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6474 IEM_MC_ARG(uint8_t, u8Src, 2);
6475 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6477 IEM_MC_LOCAL(uint8_t, u8Al);
6478
6479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6480 IEMOP_HLP_DONE_DECODING();
6481 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6482 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6483 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6484 IEM_MC_FETCH_EFLAGS(EFlags);
6485 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6486 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6487 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6488 else
6489 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6490
6491 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6492 IEM_MC_COMMIT_EFLAGS(EFlags);
6493 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6494 IEM_MC_ADVANCE_RIP();
6495 IEM_MC_END();
6496 }
6497 return VINF_SUCCESS;
6498}
6499
6500/** Opcode 0x0f 0xb1. */
6501FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6502{
6503 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6504 IEMOP_HLP_MIN_486();
6505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6506
6507 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6508 {
6509 IEMOP_HLP_DONE_DECODING();
6510 switch (pVCpu->iem.s.enmEffOpSize)
6511 {
6512 case IEMMODE_16BIT:
6513 IEM_MC_BEGIN(4, 0);
6514 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6515 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6516 IEM_MC_ARG(uint16_t, u16Src, 2);
6517 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6518
6519 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6520 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6521 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6522 IEM_MC_REF_EFLAGS(pEFlags);
6523 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6524 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6525 else
6526 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6527
6528 IEM_MC_ADVANCE_RIP();
6529 IEM_MC_END();
6530 return VINF_SUCCESS;
6531
6532 case IEMMODE_32BIT:
6533 IEM_MC_BEGIN(4, 0);
6534 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6535 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6536 IEM_MC_ARG(uint32_t, u32Src, 2);
6537 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6538
6539 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6540 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6541 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6542 IEM_MC_REF_EFLAGS(pEFlags);
6543 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6544 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6545 else
6546 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6547
6548 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6549 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6550 IEM_MC_ADVANCE_RIP();
6551 IEM_MC_END();
6552 return VINF_SUCCESS;
6553
6554 case IEMMODE_64BIT:
6555 IEM_MC_BEGIN(4, 0);
6556 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6557 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
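                /* On 32-bit hosts the 64-bit source operand is passed by
                   reference, as the assembly workers cannot take a 64-bit
                   value argument there. */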
6558#ifdef RT_ARCH_X86
6559 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6560#else
6561 IEM_MC_ARG(uint64_t, u64Src, 2);
6562#endif
6563 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6564
6565 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6566 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6567 IEM_MC_REF_EFLAGS(pEFlags);
6568#ifdef RT_ARCH_X86
6569 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6570 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6571 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6572 else
6573 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6574#else
6575 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6576 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6577 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6578 else
6579 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6580#endif
6581
6582 IEM_MC_ADVANCE_RIP();
6583 IEM_MC_END();
6584 return VINF_SUCCESS;
6585
6586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6587 }
6588 }
6589 else
6590 {
6591 switch (pVCpu->iem.s.enmEffOpSize)
6592 {
6593 case IEMMODE_16BIT:
6594 IEM_MC_BEGIN(4, 3);
6595 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6596 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6597 IEM_MC_ARG(uint16_t, u16Src, 2);
6598 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6600 IEM_MC_LOCAL(uint16_t, u16Ax);
6601
6602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6603 IEMOP_HLP_DONE_DECODING();
6604 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6605 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6606 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6607 IEM_MC_FETCH_EFLAGS(EFlags);
6608 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6609 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6610 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6611 else
6612 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6613
6614 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6615 IEM_MC_COMMIT_EFLAGS(EFlags);
6616 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6617 IEM_MC_ADVANCE_RIP();
6618 IEM_MC_END();
6619 return VINF_SUCCESS;
6620
6621 case IEMMODE_32BIT:
6622 IEM_MC_BEGIN(4, 3);
6623 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6624 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6625 IEM_MC_ARG(uint32_t, u32Src, 2);
6626 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6628 IEM_MC_LOCAL(uint32_t, u32Eax);
6629
6630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6631 IEMOP_HLP_DONE_DECODING();
6632 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6633 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6634 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6635 IEM_MC_FETCH_EFLAGS(EFlags);
6636 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6637 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6638 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6639 else
6640 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6641
6642 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6643 IEM_MC_COMMIT_EFLAGS(EFlags);
6644 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6645 IEM_MC_ADVANCE_RIP();
6646 IEM_MC_END();
6647 return VINF_SUCCESS;
6648
6649 case IEMMODE_64BIT:
6650 IEM_MC_BEGIN(4, 3);
6651 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6652 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6653#ifdef RT_ARCH_X86
6654 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6655#else
6656 IEM_MC_ARG(uint64_t, u64Src, 2);
6657#endif
6658 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6660 IEM_MC_LOCAL(uint64_t, u64Rax);
6661
6662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6663 IEMOP_HLP_DONE_DECODING();
6664 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6665 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6666 IEM_MC_FETCH_EFLAGS(EFlags);
6667 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6668#ifdef RT_ARCH_X86
6669 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6670 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6671 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6672 else
6673 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6674#else
6675 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6676 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6677 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6678 else
6679 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6680#endif
6681
6682 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6683 IEM_MC_COMMIT_EFLAGS(EFlags);
6684 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6685 IEM_MC_ADVANCE_RIP();
6686 IEM_MC_END();
6687 return VINF_SUCCESS;
6688
6689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6690 }
6691 }
6692}
6693
6694
6695FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6696{
6697 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6698 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6699
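    /* Mp operand layout: the offset comes first in memory with the 16-bit
       selector immediately after it, i.e. at disp +2/+4/+8 depending on the
       effective operand size. */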
6700 switch (pVCpu->iem.s.enmEffOpSize)
6701 {
6702 case IEMMODE_16BIT:
6703 IEM_MC_BEGIN(5, 1);
6704 IEM_MC_ARG(uint16_t, uSel, 0);
6705 IEM_MC_ARG(uint16_t, offSeg, 1);
6706 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6707 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6708 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6709 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6713 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6714 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6715 IEM_MC_END();
6716 return VINF_SUCCESS;
6717
6718 case IEMMODE_32BIT:
6719 IEM_MC_BEGIN(5, 1);
6720 IEM_MC_ARG(uint16_t, uSel, 0);
6721 IEM_MC_ARG(uint32_t, offSeg, 1);
6722 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6723 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6724 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6725 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6728 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6729 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6730 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6731 IEM_MC_END();
6732 return VINF_SUCCESS;
6733
6734 case IEMMODE_64BIT:
6735 IEM_MC_BEGIN(5, 1);
6736 IEM_MC_ARG(uint16_t, uSel, 0);
6737 IEM_MC_ARG(uint64_t, offSeg, 1);
6738 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6739 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6740 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6741 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6744            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
6745 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6746 else
6747 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6748 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6749 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6750 IEM_MC_END();
6751 return VINF_SUCCESS;
6752
6753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6754 }
6755}
6756
6757
6758/** Opcode 0x0f 0xb2. */
6759FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6760{
6761 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6762 IEMOP_HLP_MIN_386();
6763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6764 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6765 return IEMOP_RAISE_INVALID_OPCODE();
6766 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6767}
6768
6769
6770/** Opcode 0x0f 0xb3. */
6771FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6772{
6773 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6774 IEMOP_HLP_MIN_386();
6775 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6776}
6777
6778
6779/** Opcode 0x0f 0xb4. */
6780FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6781{
6782 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6783 IEMOP_HLP_MIN_386();
6784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6785 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6786 return IEMOP_RAISE_INVALID_OPCODE();
6787 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6788}
6789
6790
6791/** Opcode 0x0f 0xb5. */
6792FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6793{
6794 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6795 IEMOP_HLP_MIN_386();
6796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6798 return IEMOP_RAISE_INVALID_OPCODE();
6799 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6800}
6801
6802
6803/** Opcode 0x0f 0xb6. */
6804FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6805{
6806 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6807 IEMOP_HLP_MIN_386();
6808
6809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6810
6811 /*
6812 * If rm is denoting a register, no more instruction bytes.
6813 */
6814 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6815 {
6816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6817 switch (pVCpu->iem.s.enmEffOpSize)
6818 {
6819 case IEMMODE_16BIT:
6820 IEM_MC_BEGIN(0, 1);
6821 IEM_MC_LOCAL(uint16_t, u16Value);
6822 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6823 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6824 IEM_MC_ADVANCE_RIP();
6825 IEM_MC_END();
6826 return VINF_SUCCESS;
6827
6828 case IEMMODE_32BIT:
6829 IEM_MC_BEGIN(0, 1);
6830 IEM_MC_LOCAL(uint32_t, u32Value);
6831 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6832 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6833 IEM_MC_ADVANCE_RIP();
6834 IEM_MC_END();
6835 return VINF_SUCCESS;
6836
6837 case IEMMODE_64BIT:
6838 IEM_MC_BEGIN(0, 1);
6839 IEM_MC_LOCAL(uint64_t, u64Value);
6840 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6841 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6842 IEM_MC_ADVANCE_RIP();
6843 IEM_MC_END();
6844 return VINF_SUCCESS;
6845
6846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6847 }
6848 }
6849 else
6850 {
6851 /*
6852 * We're loading a register from memory.
6853 */
6854 switch (pVCpu->iem.s.enmEffOpSize)
6855 {
6856 case IEMMODE_16BIT:
6857 IEM_MC_BEGIN(0, 2);
6858 IEM_MC_LOCAL(uint16_t, u16Value);
6859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6862 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6863 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6864 IEM_MC_ADVANCE_RIP();
6865 IEM_MC_END();
6866 return VINF_SUCCESS;
6867
6868 case IEMMODE_32BIT:
6869 IEM_MC_BEGIN(0, 2);
6870 IEM_MC_LOCAL(uint32_t, u32Value);
6871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6874 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6875 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6876 IEM_MC_ADVANCE_RIP();
6877 IEM_MC_END();
6878 return VINF_SUCCESS;
6879
6880 case IEMMODE_64BIT:
6881 IEM_MC_BEGIN(0, 2);
6882 IEM_MC_LOCAL(uint64_t, u64Value);
6883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6886 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6887 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6888 IEM_MC_ADVANCE_RIP();
6889 IEM_MC_END();
6890 return VINF_SUCCESS;
6891
6892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6893 }
6894 }
6895}
6896
6897
6898/** Opcode 0x0f 0xb7. */
6899FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6900{
6901 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6902 IEMOP_HLP_MIN_386();
6903
6904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6905
6906 /** @todo Not entirely sure how the operand size prefix is handled here,
6907 * assuming that it will be ignored. Would be nice to have a few
6908     * tests for this. */
6909 /*
6910 * If rm is denoting a register, no more instruction bytes.
6911 */
6912 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6913 {
6914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6915 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6916 {
6917 IEM_MC_BEGIN(0, 1);
6918 IEM_MC_LOCAL(uint32_t, u32Value);
6919 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6920 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6921 IEM_MC_ADVANCE_RIP();
6922 IEM_MC_END();
6923 }
6924 else
6925 {
6926 IEM_MC_BEGIN(0, 1);
6927 IEM_MC_LOCAL(uint64_t, u64Value);
6928 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6929 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6930 IEM_MC_ADVANCE_RIP();
6931 IEM_MC_END();
6932 }
6933 }
6934 else
6935 {
6936 /*
6937 * We're loading a register from memory.
6938 */
6939 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6940 {
6941 IEM_MC_BEGIN(0, 2);
6942 IEM_MC_LOCAL(uint32_t, u32Value);
6943 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6946 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6947 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6948 IEM_MC_ADVANCE_RIP();
6949 IEM_MC_END();
6950 }
6951 else
6952 {
6953 IEM_MC_BEGIN(0, 2);
6954 IEM_MC_LOCAL(uint64_t, u64Value);
6955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6958 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6959 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6960 IEM_MC_ADVANCE_RIP();
6961 IEM_MC_END();
6962 }
6963 }
6964 return VINF_SUCCESS;
6965}
6966
6967
6968/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6969FNIEMOP_UD_STUB(iemOp_jmpe);
6970/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6971FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6972
6973
6974/**
6975 * @opcode 0xb9
6976 * @opinvalid intel-modrm
6977 * @optest ->
6978 */
6979FNIEMOP_DEF(iemOp_Grp10)
6980{
6981 /*
6982     * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
6983     * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6984 */
6985 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6986 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6987 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6988}
6989
6990
6991/** Opcode 0x0f 0xba. */
6992FNIEMOP_DEF(iemOp_Grp8)
6993{
6994 IEMOP_HLP_MIN_386();
6995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6996 PCIEMOPBINSIZES pImpl;
6997 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6998 {
6999 case 0: case 1: case 2: case 3:
7000 /* Both AMD and Intel want full modr/m decoding and imm8. */
7001 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7002 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7003 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7004 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7005 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7007 }
7008 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7009
7010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7011 {
7012 /* register destination. */
7013 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7015
7016 switch (pVCpu->iem.s.enmEffOpSize)
7017 {
7018 case IEMMODE_16BIT:
7019 IEM_MC_BEGIN(3, 0);
7020 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7021 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7022 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7023
7024 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7025 IEM_MC_REF_EFLAGS(pEFlags);
7026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7027
7028 IEM_MC_ADVANCE_RIP();
7029 IEM_MC_END();
7030 return VINF_SUCCESS;
7031
7032 case IEMMODE_32BIT:
7033 IEM_MC_BEGIN(3, 0);
7034 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7035 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7037
7038 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7039 IEM_MC_REF_EFLAGS(pEFlags);
7040 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7041
7042 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7043 IEM_MC_ADVANCE_RIP();
7044 IEM_MC_END();
7045 return VINF_SUCCESS;
7046
7047 case IEMMODE_64BIT:
7048 IEM_MC_BEGIN(3, 0);
7049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7050 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7052
7053 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7054 IEM_MC_REF_EFLAGS(pEFlags);
7055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7056
7057 IEM_MC_ADVANCE_RIP();
7058 IEM_MC_END();
7059 return VINF_SUCCESS;
7060
7061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7062 }
7063 }
7064 else
7065 {
7066 /* memory destination. */
7067
7068 uint32_t fAccess;
7069 if (pImpl->pfnLockedU16)
7070 fAccess = IEM_ACCESS_DATA_RW;
7071 else /* BT */
7072 fAccess = IEM_ACCESS_DATA_R;
7073
7074 /** @todo test negative bit offsets! */
7075 switch (pVCpu->iem.s.enmEffOpSize)
7076 {
7077 case IEMMODE_16BIT:
7078 IEM_MC_BEGIN(3, 1);
7079 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7080 IEM_MC_ARG(uint16_t, u16Src, 1);
7081 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7083
7084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7085 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7086 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7087 if (pImpl->pfnLockedU16)
7088 IEMOP_HLP_DONE_DECODING();
7089 else
7090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7091 IEM_MC_FETCH_EFLAGS(EFlags);
7092 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7093 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7094 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7095 else
7096 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7097 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7098
7099 IEM_MC_COMMIT_EFLAGS(EFlags);
7100 IEM_MC_ADVANCE_RIP();
7101 IEM_MC_END();
7102 return VINF_SUCCESS;
7103
7104 case IEMMODE_32BIT:
7105 IEM_MC_BEGIN(3, 1);
7106 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7107 IEM_MC_ARG(uint32_t, u32Src, 1);
7108 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7110
7111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7112 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7113 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7114 if (pImpl->pfnLockedU16)
7115 IEMOP_HLP_DONE_DECODING();
7116 else
7117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7118 IEM_MC_FETCH_EFLAGS(EFlags);
7119 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7120 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7121 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7122 else
7123 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7124 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7125
7126 IEM_MC_COMMIT_EFLAGS(EFlags);
7127 IEM_MC_ADVANCE_RIP();
7128 IEM_MC_END();
7129 return VINF_SUCCESS;
7130
7131 case IEMMODE_64BIT:
7132 IEM_MC_BEGIN(3, 1);
7133 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7134 IEM_MC_ARG(uint64_t, u64Src, 1);
7135 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7137
7138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7139 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7140 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7141 if (pImpl->pfnLockedU16)
7142 IEMOP_HLP_DONE_DECODING();
7143 else
7144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7145 IEM_MC_FETCH_EFLAGS(EFlags);
7146 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7147 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7148 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7149 else
7150 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7151 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7152
7153 IEM_MC_COMMIT_EFLAGS(EFlags);
7154 IEM_MC_ADVANCE_RIP();
7155 IEM_MC_END();
7156 return VINF_SUCCESS;
7157
7158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7159 }
7160 }
7161}
7162
7163
7164/** Opcode 0x0f 0xbb. */
7165FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7166{
7167 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7168 IEMOP_HLP_MIN_386();
7169 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7170}
7171
7172
7173/** Opcode 0x0f 0xbc. */
7174FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7175{
7176 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7177 IEMOP_HLP_MIN_386();
7178 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
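    /* bsf scans for the least significant set bit, e.g. a source of 0x48
       yields 3; when the source is zero, ZF is set and the destination is
       left undefined. */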
7179 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7180}
7181
7182
7183/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7184FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7185
7186
7187/** Opcode 0x0f 0xbd. */
7188FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7189{
7190 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7191 IEMOP_HLP_MIN_386();
7192 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7193 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7194}
7195
7196
7197/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7198FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7199
7200
7201/** Opcode 0x0f 0xbe. */
7202FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7203{
7204 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7205 IEMOP_HLP_MIN_386();
7206
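    /* The byte is sign-extended into the wider destination, e.g. a source
       of 0x80 becomes 0xFFFFFF80 with a 32-bit destination. */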
7207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7208
7209 /*
7210 * If rm is denoting a register, no more instruction bytes.
7211 */
7212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7213 {
7214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7215 switch (pVCpu->iem.s.enmEffOpSize)
7216 {
7217 case IEMMODE_16BIT:
7218 IEM_MC_BEGIN(0, 1);
7219 IEM_MC_LOCAL(uint16_t, u16Value);
7220 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7221 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7222 IEM_MC_ADVANCE_RIP();
7223 IEM_MC_END();
7224 return VINF_SUCCESS;
7225
7226 case IEMMODE_32BIT:
7227 IEM_MC_BEGIN(0, 1);
7228 IEM_MC_LOCAL(uint32_t, u32Value);
7229 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7230 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7231 IEM_MC_ADVANCE_RIP();
7232 IEM_MC_END();
7233 return VINF_SUCCESS;
7234
7235 case IEMMODE_64BIT:
7236 IEM_MC_BEGIN(0, 1);
7237 IEM_MC_LOCAL(uint64_t, u64Value);
7238 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7239 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7240 IEM_MC_ADVANCE_RIP();
7241 IEM_MC_END();
7242 return VINF_SUCCESS;
7243
7244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7245 }
7246 }
7247 else
7248 {
7249 /*
7250 * We're loading a register from memory.
7251 */
7252 switch (pVCpu->iem.s.enmEffOpSize)
7253 {
7254 case IEMMODE_16BIT:
7255 IEM_MC_BEGIN(0, 2);
7256 IEM_MC_LOCAL(uint16_t, u16Value);
7257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7260 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7261 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7262 IEM_MC_ADVANCE_RIP();
7263 IEM_MC_END();
7264 return VINF_SUCCESS;
7265
7266 case IEMMODE_32BIT:
7267 IEM_MC_BEGIN(0, 2);
7268 IEM_MC_LOCAL(uint32_t, u32Value);
7269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7272 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7273 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7274 IEM_MC_ADVANCE_RIP();
7275 IEM_MC_END();
7276 return VINF_SUCCESS;
7277
7278 case IEMMODE_64BIT:
7279 IEM_MC_BEGIN(0, 2);
7280 IEM_MC_LOCAL(uint64_t, u64Value);
7281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7284 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7285 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7286 IEM_MC_ADVANCE_RIP();
7287 IEM_MC_END();
7288 return VINF_SUCCESS;
7289
7290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7291 }
7292 }
7293}
7294
7295
7296/** Opcode 0x0f 0xbf. */
7297FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7298{
7299 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7300 IEMOP_HLP_MIN_386();
7301
7302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7303
7304 /** @todo Not entirely sure how the operand size prefix is handled here,
7305 * assuming that it will be ignored. Would be nice to have a few
7306 * tests for this. */
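    /* In practice this means a 16-bit effective operand size takes the
       32-bit path below, i.e. the prefix is treated as ignored. */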
7307 /*
7308 * If rm is denoting a register, no more instruction bytes.
7309 */
7310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7311 {
7312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7313 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7314 {
7315 IEM_MC_BEGIN(0, 1);
7316 IEM_MC_LOCAL(uint32_t, u32Value);
7317 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7318 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7319 IEM_MC_ADVANCE_RIP();
7320 IEM_MC_END();
7321 }
7322 else
7323 {
7324 IEM_MC_BEGIN(0, 1);
7325 IEM_MC_LOCAL(uint64_t, u64Value);
7326 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7327 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7328 IEM_MC_ADVANCE_RIP();
7329 IEM_MC_END();
7330 }
7331 }
7332 else
7333 {
7334 /*
7335 * We're loading a register from memory.
7336 */
7337 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7338 {
7339 IEM_MC_BEGIN(0, 2);
7340 IEM_MC_LOCAL(uint32_t, u32Value);
7341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7344 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7345 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7346 IEM_MC_ADVANCE_RIP();
7347 IEM_MC_END();
7348 }
7349 else
7350 {
7351 IEM_MC_BEGIN(0, 2);
7352 IEM_MC_LOCAL(uint64_t, u64Value);
7353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7356 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7357 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7358 IEM_MC_ADVANCE_RIP();
7359 IEM_MC_END();
7360 }
7361 }
7362 return VINF_SUCCESS;
7363}
7364
7365
7366/** Opcode 0x0f 0xc0. */
7367FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7368{
7369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7370 IEMOP_HLP_MIN_486();
7371 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7372
7373 /*
7374 * If rm is denoting a register, no more instruction bytes.
7375 */
7376 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7377 {
7378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7379
7380 IEM_MC_BEGIN(3, 0);
7381 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7382 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7383 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7384
7385 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7386 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7387 IEM_MC_REF_EFLAGS(pEFlags);
7388 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7389
7390 IEM_MC_ADVANCE_RIP();
7391 IEM_MC_END();
7392 }
7393 else
7394 {
7395 /*
7396 * We're accessing memory.
7397 */
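        /* xadd is an exchange-and-add: the destination receives dst + src
           while the register source receives the old destination value,
           which is why a copy of the register is taken and stored back
           afterwards. */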
7398 IEM_MC_BEGIN(3, 3);
7399 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7400 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7401 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7402 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7404
7405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7406 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7407 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7408 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7409 IEM_MC_FETCH_EFLAGS(EFlags);
7410 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7411 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7412 else
7413 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7414
7415 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7416 IEM_MC_COMMIT_EFLAGS(EFlags);
7417 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7418 IEM_MC_ADVANCE_RIP();
7419 IEM_MC_END();
7420 return VINF_SUCCESS;
7421 }
7422 return VINF_SUCCESS;
7423}
7424
7425
7426/** Opcode 0x0f 0xc1. */
7427FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7428{
7429 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7430 IEMOP_HLP_MIN_486();
7431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7432
7433 /*
7434 * If rm is denoting a register, no more instruction bytes.
7435 */
7436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7437 {
7438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7439
7440 switch (pVCpu->iem.s.enmEffOpSize)
7441 {
7442 case IEMMODE_16BIT:
7443 IEM_MC_BEGIN(3, 0);
7444 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7445 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7446 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7447
7448 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7449 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7450 IEM_MC_REF_EFLAGS(pEFlags);
7451 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7452
7453 IEM_MC_ADVANCE_RIP();
7454 IEM_MC_END();
7455 return VINF_SUCCESS;
7456
7457 case IEMMODE_32BIT:
7458 IEM_MC_BEGIN(3, 0);
7459 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7460 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7461 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7462
7463 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7464 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7465 IEM_MC_REF_EFLAGS(pEFlags);
7466 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7467
7468 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7469 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7470 IEM_MC_ADVANCE_RIP();
7471 IEM_MC_END();
7472 return VINF_SUCCESS;
7473
7474 case IEMMODE_64BIT:
7475 IEM_MC_BEGIN(3, 0);
7476 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7477 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7479
7480 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7481 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7482 IEM_MC_REF_EFLAGS(pEFlags);
7483 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7484
7485 IEM_MC_ADVANCE_RIP();
7486 IEM_MC_END();
7487 return VINF_SUCCESS;
7488
7489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7490 }
7491 }
7492 else
7493 {
7494 /*
7495 * We're accessing memory.
7496 */
7497 switch (pVCpu->iem.s.enmEffOpSize)
7498 {
7499 case IEMMODE_16BIT:
7500 IEM_MC_BEGIN(3, 3);
7501 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7502 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7503 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7504 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7506
7507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7508 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7509 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7510 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7511 IEM_MC_FETCH_EFLAGS(EFlags);
7512 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7513 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7514 else
7515 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7516
7517 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7518 IEM_MC_COMMIT_EFLAGS(EFlags);
7519 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7520 IEM_MC_ADVANCE_RIP();
7521 IEM_MC_END();
7522 return VINF_SUCCESS;
7523
7524 case IEMMODE_32BIT:
7525 IEM_MC_BEGIN(3, 3);
7526 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7527 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7528 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7529 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7531
7532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7533 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7534 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7535 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7536 IEM_MC_FETCH_EFLAGS(EFlags);
7537 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7538 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7539 else
7540 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7541
7542 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7543 IEM_MC_COMMIT_EFLAGS(EFlags);
7544 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7545 IEM_MC_ADVANCE_RIP();
7546 IEM_MC_END();
7547 return VINF_SUCCESS;
7548
7549 case IEMMODE_64BIT:
7550 IEM_MC_BEGIN(3, 3);
7551 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7552 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7553 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7554 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7556
7557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7558 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7559 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7560 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7561 IEM_MC_FETCH_EFLAGS(EFlags);
7562 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7563 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7564 else
7565 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7566
7567 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7568 IEM_MC_COMMIT_EFLAGS(EFlags);
7569 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7570 IEM_MC_ADVANCE_RIP();
7571 IEM_MC_END();
7572 return VINF_SUCCESS;
7573
7574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7575 }
7576 }
7577}
7578
7579
7580/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7581FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7582/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7583FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7584/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7585FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7586/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7587FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7588
7589
7590/** Opcode 0x0f 0xc3. */
7591FNIEMOP_DEF(iemOp_movnti_My_Gy)
7592{
7593 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7594
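    /* A non-temporal store: the data is written with a hint that it will
       not be reused soon, allowing the CPU to minimize cache pollution. */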
7595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7596
7597 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7598 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7599 {
7600 switch (pVCpu->iem.s.enmEffOpSize)
7601 {
7602 case IEMMODE_32BIT:
7603 IEM_MC_BEGIN(0, 2);
7604 IEM_MC_LOCAL(uint32_t, u32Value);
7605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7606
7607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7609 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7610 return IEMOP_RAISE_INVALID_OPCODE();
7611
7612 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7613 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7614 IEM_MC_ADVANCE_RIP();
7615 IEM_MC_END();
7616 break;
7617
7618 case IEMMODE_64BIT:
7619 IEM_MC_BEGIN(0, 2);
7620 IEM_MC_LOCAL(uint64_t, u64Value);
7621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7622
7623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7625 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7626 return IEMOP_RAISE_INVALID_OPCODE();
7627
7628 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7629 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7630 IEM_MC_ADVANCE_RIP();
7631 IEM_MC_END();
7632 break;
7633
7634 case IEMMODE_16BIT:
7635 /** @todo check this form. */
7636 return IEMOP_RAISE_INVALID_OPCODE();
7637 }
7638 }
7639 else
7640 return IEMOP_RAISE_INVALID_OPCODE();
7641 return VINF_SUCCESS;
7642}
7643/* Opcode 0x66 0x0f 0xc3 - invalid */
7644/* Opcode 0xf3 0x0f 0xc3 - invalid */
7645/* Opcode 0xf2 0x0f 0xc3 - invalid */
7646
7647/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7648FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7649/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7650FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7651/* Opcode 0xf3 0x0f 0xc4 - invalid */
7652/* Opcode 0xf2 0x0f 0xc4 - invalid */
7653
7654/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7655FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7656/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7657FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7658/* Opcode 0xf3 0x0f 0xc5 - invalid */
7659/* Opcode 0xf2 0x0f 0xc5 - invalid */
7660
7661/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7662FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7663/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7664FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7665/* Opcode 0xf3 0x0f 0xc6 - invalid */
7666/* Opcode 0xf2 0x0f 0xc6 - invalid */
7667
7668
7669/** Opcode 0x0f 0xc7 !11/1. */
7670FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7671{
7672 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7673
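    /* cmpxchg8b: if EDX:EAX equals the 64-bit memory operand, ZF is set and
       ECX:EBX is stored to memory; otherwise ZF is cleared and the memory
       value is loaded into EDX:EAX. */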
7674 IEM_MC_BEGIN(4, 3);
7675 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7676 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7677 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7678 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7679 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7680 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7682
7683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7684 IEMOP_HLP_DONE_DECODING();
7685 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7686
7687 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7688 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7689 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7690
7691 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7692 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7693 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7694
7695 IEM_MC_FETCH_EFLAGS(EFlags);
7696 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7697 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7698 else
7699 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7700
7701 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7702 IEM_MC_COMMIT_EFLAGS(EFlags);
7703 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7704 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7705 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7706 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7707 IEM_MC_ENDIF();
7708 IEM_MC_ADVANCE_RIP();
7709
7710 IEM_MC_END();
7711 return VINF_SUCCESS;
7712}
7713
7714
7715/** Opcode REX.W 0x0f 0xc7 !11/1. */
7716FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7717{
7718 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7719 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7720 {
7721#if 0
7722 RT_NOREF(bRm);
7723 IEMOP_BITCH_ABOUT_STUB();
7724 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7725#else
7726 IEM_MC_BEGIN(4, 3);
7727 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7728 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7729 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7730 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7731 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7732 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7734
7735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7736 IEMOP_HLP_DONE_DECODING();
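        /* Unlike cmpxchg8b, cmpxchg16b requires the memory operand to be
           16-byte aligned, raising #GP(0) when it is not. */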
7737 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7738 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7739
7740 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7741 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7742 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7743
7744 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7745 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7746 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7747
7748 IEM_MC_FETCH_EFLAGS(EFlags);
7749# ifdef RT_ARCH_AMD64
7750 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7751 {
7752 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7753 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7754 else
7755 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7756 }
7757 else
7758# endif
7759 {
7760 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7761 accesses and not at all atomic, which works fine in a uni-CPU guest
7762 configuration (ignoring DMA). If guest SMP is active we have no choice
7763 but to use a rendezvous callback here. Sigh. */
7764 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7765 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7766 else
7767 {
7768 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7769 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7770 }
7771 }
7772
7773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7774 IEM_MC_COMMIT_EFLAGS(EFlags);
7775 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7776 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7777 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7778 IEM_MC_ENDIF();
7779 IEM_MC_ADVANCE_RIP();
7780
7781 IEM_MC_END();
7782 return VINF_SUCCESS;
7783#endif
7784 }
7785 Log(("cmpxchg16b -> #UD\n"));
7786 return IEMOP_RAISE_INVALID_OPCODE();
7787}
7788
7789FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7790{
7791 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7792 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7793 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7794}
7795
7796/** Opcode 0x0f 0xc7 11/6. */
7797FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7798
7799/** Opcode 0x0f 0xc7 !11/6. */
7800FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7801
7802/** Opcode 0x66 0x0f 0xc7 !11/6. */
7803FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7804
7805/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7806FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7807
7808/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7809FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7810
7811/** Opcode 0x0f 0xc7 11/7. */
7812FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7813
7814
7815/**
7816 * Group 9 jump table for register variant.
7817 */
7818IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7819{ /* pfx: none, 066h, 0f3h, 0f2h */
7820 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7821 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7822 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7823 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7824 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7825 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7826 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7827 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7828};
7829AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7830
7831
7832/**
7833 * Group 9 jump table for memory variant.
7834 */
7835IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7836{ /* pfx: none, 066h, 0f3h, 0f2h */
7837 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7838 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7839 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7840 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7841 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7842 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7843 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7844 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7845};
7846AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7847
7848
7849/** Opcode 0x0f 0xc7. */
7850FNIEMOP_DEF(iemOp_Grp9)
7851{
7852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
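    /* Each /r row in the tables above has four columns (none, 066h, 0f3h,
       0f2h), so the entry index is reg * 4 + the operand prefix index. */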
7853 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7854 /* register, register */
7855 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7856 + pVCpu->iem.s.idxPrefix], bRm);
7857 /* memory, register */
7858 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7859 + pVCpu->iem.s.idxPrefix], bRm);
7860}
7861
7862
7863/**
7864 * Common 'bswap register' helper.
7865 */
7866FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7867{
7868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7869 switch (pVCpu->iem.s.enmEffOpSize)
7870 {
7871 case IEMMODE_16BIT:
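            /* The result of bswap with a 16-bit operand is undefined by the
               manuals; the 16-bit helper mirrors observed CPU behavior for
               the low word. */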
7872 IEM_MC_BEGIN(1, 0);
7873 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7874 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7875 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7876 IEM_MC_ADVANCE_RIP();
7877 IEM_MC_END();
7878 return VINF_SUCCESS;
7879
7880 case IEMMODE_32BIT:
7881 IEM_MC_BEGIN(1, 0);
7882 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7883 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7884 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7885 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7886 IEM_MC_ADVANCE_RIP();
7887 IEM_MC_END();
7888 return VINF_SUCCESS;
7889
7890 case IEMMODE_64BIT:
7891 IEM_MC_BEGIN(1, 0);
7892 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7893 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7894 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7895 IEM_MC_ADVANCE_RIP();
7896 IEM_MC_END();
7897 return VINF_SUCCESS;
7898
7899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7900 }
7901}
7902
7903
7904/** Opcode 0x0f 0xc8. */
7905FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7906{
7907 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7908 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7909 prefix, but it appears REX.B is the correct prefix. For a parallel
7910 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7911 IEMOP_HLP_MIN_486();
7912 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7913}
7914
7915
7916/** Opcode 0x0f 0xc9. */
7917FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7918{
7919 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7920 IEMOP_HLP_MIN_486();
7921 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7922}
7923
7924
7925/** Opcode 0x0f 0xca. */
7926FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7927{
7928 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7929 IEMOP_HLP_MIN_486();
7930 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7931}
7932
7933
7934/** Opcode 0x0f 0xcb. */
7935FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7936{
7937 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7938 IEMOP_HLP_MIN_486();
7939 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7940}
7941
7942
7943/** Opcode 0x0f 0xcc. */
7944FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7945{
7946 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7947 IEMOP_HLP_MIN_486();
7948 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7949}
7950
7951
7952/** Opcode 0x0f 0xcd. */
7953FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7954{
7955 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7956 IEMOP_HLP_MIN_486();
7957 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7958}
7959
7960
7961/** Opcode 0x0f 0xce. */
7962FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7963{
7964 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7965 IEMOP_HLP_MIN_486();
7966 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7967}
7968
7969
7970/** Opcode 0x0f 0xcf. */
7971FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7972{
7973 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7974 IEMOP_HLP_MIN_486();
7975 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7976}
7977
7978
7979/* Opcode 0x0f 0xd0 - invalid */
7980/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7981FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7982/* Opcode 0xf3 0x0f 0xd0 - invalid */
7983/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7984FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7985
7986/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7987FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7988/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7989FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7990/* Opcode 0xf3 0x0f 0xd1 - invalid */
7991/* Opcode 0xf2 0x0f 0xd1 - invalid */
7992
7993/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7994FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7995/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7996FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7997/* Opcode 0xf3 0x0f 0xd2 - invalid */
7998/* Opcode 0xf2 0x0f 0xd2 - invalid */
7999
8000/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8001FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8002/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8003FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8004/* Opcode 0xf3 0x0f 0xd3 - invalid */
8005/* Opcode 0xf2 0x0f 0xd3 - invalid */
8006
8007/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8008FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8009/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8010FNIEMOP_STUB(iemOp_paddq_Vx_W);
8011/* Opcode 0xf3 0x0f 0xd4 - invalid */
8012/* Opcode 0xf2 0x0f 0xd4 - invalid */
8013
8014/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8015FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8016/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8017FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8018/* Opcode 0xf3 0x0f 0xd5 - invalid */
8019/* Opcode 0xf2 0x0f 0xd5 - invalid */
8020
8021/* Opcode 0x0f 0xd6 - invalid */
8022
8023/**
8024 * @opcode 0xd6
8025 * @oppfx 0x66
8026 * @opcpuid sse2
8027 * @opgroup og_sse2_pcksclr_datamove
8028 * @opxcpttype none
8029 * @optest op1=-1 op2=2 -> op1=2
8030 * @optest op1=0 op2=-42 -> op1=-42
8031 */
8032FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8033{
8034 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8036 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8037 {
8038 /*
8039 * Register, register.
8040 */
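         /* The register form zero-extends the 64-bit value into the full
            128-bit destination register (the WqZxReg operand above); the
            memory form below stores just 64 bits. */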
8041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8042 IEM_MC_BEGIN(0, 2);
8043 IEM_MC_LOCAL(uint64_t, uSrc);
8044
8045 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8046 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8047
8048 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8049 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8050
8051 IEM_MC_ADVANCE_RIP();
8052 IEM_MC_END();
8053 }
8054 else
8055 {
8056 /*
8057 * Memory, register.
8058 */
8059 IEM_MC_BEGIN(0, 2);
8060 IEM_MC_LOCAL(uint64_t, uSrc);
8061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8062
8063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8065 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8066 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8067
8068 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8069 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8070
8071 IEM_MC_ADVANCE_RIP();
8072 IEM_MC_END();
8073 }
8074 return VINF_SUCCESS;
8075}
8076
8077
8078/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
8079FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
8080/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
8081FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
8082#if 0
8083FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
8084{
8085 /* Docs say register only. */
8086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8087
8088 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8089 {
8090 case IEM_OP_PRF_SIZE_OP: /* SSE */
8091 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
8092 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8093 IEM_MC_BEGIN(2, 0);
8094 IEM_MC_ARG(uint64_t *, pDst, 0);
8095 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8096 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8097 IEM_MC_PREPARE_SSE_USAGE();
8098 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8099 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8100 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8101 IEM_MC_ADVANCE_RIP();
8102 IEM_MC_END();
8103 return VINF_SUCCESS;
8104
8105 case 0: /* MMX */
8106 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
8107 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8108 IEM_MC_BEGIN(2, 0);
8109 IEM_MC_ARG(uint64_t *, pDst, 0);
8110 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8111 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8112 IEM_MC_PREPARE_FPU_USAGE();
8113 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8114 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8115 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8116 IEM_MC_ADVANCE_RIP();
8117 IEM_MC_END();
8118 return VINF_SUCCESS;
8119
8120 default:
8121 return IEMOP_RAISE_INVALID_OPCODE();
8122 }
8123}
8124#endif
8125
8126
8127/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8128FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8129{
8130 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8131 /** @todo testcase: Check that the instruction implicitly clears the high
8132 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8133 * and the opcode is modified to work with the whole width (not
8134 * just 128). */
8135 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8136 /* Docs say register only. */
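    /* pmovmskb gathers the most significant bit of each of the eight source
       bytes into the low 8 bits of the destination GPR, e.g. a source of
       0xffff000000000080 yields 0xc1. */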
8137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8138 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8139 {
8140 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8141 IEM_MC_BEGIN(2, 0);
8142 IEM_MC_ARG(uint64_t *, pDst, 0);
8143 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8144 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8145 IEM_MC_PREPARE_FPU_USAGE();
8146 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8147 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8148 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8149 IEM_MC_ADVANCE_RIP();
8150 IEM_MC_END();
8151 return VINF_SUCCESS;
8152 }
8153 return IEMOP_RAISE_INVALID_OPCODE();
8154}
8155
8156/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8157FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8158{
8159 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8160 /** @todo testcase: Check that the instruction implicitly clears the high
8161 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8162 * and the opcode is modified to work with the whole width (not
8163 * just 128). */
8164 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8165 /* Docs say register only. */
8166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8168 {
8169 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8170 IEM_MC_BEGIN(2, 0);
8171 IEM_MC_ARG(uint64_t *, pDst, 0);
8172 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8173 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8174 IEM_MC_PREPARE_SSE_USAGE();
8175 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8176 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8177 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8178 IEM_MC_ADVANCE_RIP();
8179 IEM_MC_END();
8180 return VINF_SUCCESS;
8181 }
8182 return IEMOP_RAISE_INVALID_OPCODE();
8183}
8184
8185/* Opcode 0xf3 0x0f 0xd7 - invalid */
8186/* Opcode 0xf2 0x0f 0xd7 - invalid */
8187
8188
8189/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8190FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8191/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8192FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8193/* Opcode 0xf3 0x0f 0xd8 - invalid */
8194/* Opcode 0xf2 0x0f 0xd8 - invalid */
8195
8196/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8197FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8198/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8199FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8200/* Opcode 0xf3 0x0f 0xd9 - invalid */
8201/* Opcode 0xf2 0x0f 0xd9 - invalid */
8202
8203/** Opcode 0x0f 0xda - pminub Pq, Qq */
8204FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8205/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8206FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8207/* Opcode 0xf3 0x0f 0xda - invalid */
8208/* Opcode 0xf2 0x0f 0xda - invalid */
8209
8210/** Opcode 0x0f 0xdb - pand Pq, Qq */
8211FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8212/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8213FNIEMOP_STUB(iemOp_pand_Vx_W);
8214/* Opcode 0xf3 0x0f 0xdb - invalid */
8215/* Opcode 0xf2 0x0f 0xdb - invalid */
8216
8217/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8218FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8219/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8220FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8221/* Opcode 0xf3 0x0f 0xdc - invalid */
8222/* Opcode 0xf2 0x0f 0xdc - invalid */
8223
8224/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8225FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8226/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8227FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8228/* Opcode 0xf3 0x0f 0xdd - invalid */
8229/* Opcode 0xf2 0x0f 0xdd - invalid */
8230
8231/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8232FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8233/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8234FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8235/* Opcode 0xf3 0x0f 0xde - invalid */
8236/* Opcode 0xf2 0x0f 0xde - invalid */
8237
8238/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8239FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8240/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8241FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8242/* Opcode 0xf3 0x0f 0xdf - invalid */
8243/* Opcode 0xf2 0x0f 0xdf - invalid */
8244
8245/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8246FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8247/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8248FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8249/* Opcode 0xf3 0x0f 0xe0 - invalid */
8250/* Opcode 0xf2 0x0f 0xe0 - invalid */
8251
8252/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8253FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8254/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8255FNIEMOP_STUB(iemOp_psraw_Vx_W);
8256/* Opcode 0xf3 0x0f 0xe1 - invalid */
8257/* Opcode 0xf2 0x0f 0xe1 - invalid */
8258
8259/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8260FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8261/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8262FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8263/* Opcode 0xf3 0x0f 0xe2 - invalid */
8264/* Opcode 0xf2 0x0f 0xe2 - invalid */
8265
8266/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8267FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8268/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8269FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8270/* Opcode 0xf3 0x0f 0xe3 - invalid */
8271/* Opcode 0xf2 0x0f 0xe3 - invalid */
8272
8273/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8274FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8275/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8276FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8277/* Opcode 0xf3 0x0f 0xe4 - invalid */
8278/* Opcode 0xf2 0x0f 0xe4 - invalid */
8279
8280/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8281FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8282/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8283FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8284/* Opcode 0xf3 0x0f 0xe5 - invalid */
8285/* Opcode 0xf2 0x0f 0xe5 - invalid */
8286
8287/* Opcode 0x0f 0xe6 - invalid */
8288/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8289FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8290/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8291FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8292/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8293FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8294
8295
8296/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8297FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8298{
8299 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8301 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8302 {
8303 /* Register, memory. */
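        /* movntq is the MMX counterpart of movnti above: a 64-bit store
           carrying the same non-temporal hint. */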
8304 IEM_MC_BEGIN(0, 2);
8305 IEM_MC_LOCAL(uint64_t, uSrc);
8306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8307
8308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8310 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8311 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8312
8313 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8314 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8315
8316 IEM_MC_ADVANCE_RIP();
8317 IEM_MC_END();
8318 return VINF_SUCCESS;
8319 }
8320 /* The register, register encoding is invalid. */
8321 return IEMOP_RAISE_INVALID_OPCODE();
8322}
8323
8324/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8325FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8326{
8327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8328 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8329 {
8330 /* Register, memory. */
8331 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8332 IEM_MC_BEGIN(0, 2);
8333 IEM_MC_LOCAL(RTUINT128U, uSrc);
8334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8335
8336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8338 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8339 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8340
8341 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8342 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8343
8344 IEM_MC_ADVANCE_RIP();
8345 IEM_MC_END();
8346 return VINF_SUCCESS;
8347 }
8348
8349 /* The register, register encoding is invalid. */
8350 return IEMOP_RAISE_INVALID_OPCODE();
8351}
8352
8353/* Opcode 0xf3 0x0f 0xe7 - invalid */
8354/* Opcode 0xf2 0x0f 0xe7 - invalid */
8355
8356
8357/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8358FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8359/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8360FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8361/* Opcode 0xf3 0x0f 0xe8 - invalid */
8362/* Opcode 0xf2 0x0f 0xe8 - invalid */
8363
8364/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8365FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8366/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8367FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8368/* Opcode 0xf3 0x0f 0xe9 - invalid */
8369/* Opcode 0xf2 0x0f 0xe9 - invalid */
8370
8371/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8372FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8373/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8374FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8375/* Opcode 0xf3 0x0f 0xea - invalid */
8376/* Opcode 0xf2 0x0f 0xea - invalid */
8377
8378/** Opcode 0x0f 0xeb - por Pq, Qq */
8379FNIEMOP_STUB(iemOp_por_Pq_Qq);
8380/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8381FNIEMOP_STUB(iemOp_por_Vx_W);
8382/* Opcode 0xf3 0x0f 0xeb - invalid */
8383/* Opcode 0xf2 0x0f 0xeb - invalid */
8384
8385/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8386FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8387/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8388FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8389/* Opcode 0xf3 0x0f 0xec - invalid */
8390/* Opcode 0xf2 0x0f 0xec - invalid */
8391
8392/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8393FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8394/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8395FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8396/* Opcode 0xf3 0x0f 0xed - invalid */
8397/* Opcode 0xf2 0x0f 0xed - invalid */
8398
8399/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8400FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8401/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8402FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8403/* Opcode 0xf3 0x0f 0xee - invalid */
8404/* Opcode 0xf2 0x0f 0xee - invalid */
8405
8406
8407/** Opcode 0x0f 0xef - pxor Pq, Qq */
8408FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8409{
8410 IEMOP_MNEMONIC(pxor, "pxor");
8411 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8412}
8413
8414/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8415FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8416{
8417 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8418 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8419}
8420
8421/* Opcode 0xf3 0x0f 0xef - invalid */
8422/* Opcode 0xf2 0x0f 0xef - invalid */
8423
8424/* Opcode 0x0f 0xf0 - invalid */
8425/* Opcode 0x66 0x0f 0xf0 - invalid */
8426/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8427FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8428
8429/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8430FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8431/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8432FNIEMOP_STUB(iemOp_psllw_Vx_W);
8433/* Opcode 0xf2 0x0f 0xf1 - invalid */
8434
8435/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8436FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8437/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8438FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8439/* Opcode 0xf2 0x0f 0xf2 - invalid */
8440
8441/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8442FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8443/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8444FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8445/* Opcode 0xf2 0x0f 0xf3 - invalid */
8446
8447/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8448FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8449/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8450FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8451/* Opcode 0xf2 0x0f 0xf4 - invalid */
8452
8453/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8454FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8455/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8456FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8457/* Opcode 0xf2 0x0f 0xf5 - invalid */
8458
8459/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8460FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8461/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8462FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8463/* Opcode 0xf2 0x0f 0xf6 - invalid */
8464
8465/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8466FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8467/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8468FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8469/* Opcode 0xf2 0x0f 0xf7 - invalid */
8470
8471/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8472FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8473/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8474FNIEMOP_STUB(iemOp_psubb_Vx_W);
8475/* Opcode 0xf2 0x0f 0xf8 - invalid */
8476
8477/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8478FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8479/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8480FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8481/* Opcode 0xf2 0x0f 0xf9 - invalid */
8482
8483/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8484FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8485/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8486FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8487/* Opcode 0xf2 0x0f 0xfa - invalid */
8488
8489/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8490FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8491/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8492FNIEMOP_STUB(iemOp_psubq_Vx_W);
8493/* Opcode 0xf2 0x0f 0xfb - invalid */
8494
8495/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8496FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8497/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8498FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8499/* Opcode 0xf2 0x0f 0xfc - invalid */
8500
8501/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8502FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8503/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8504FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8505/* Opcode 0xf2 0x0f 0xfd - invalid */
8506
8507/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8508FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8509/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8510FNIEMOP_STUB(iemOp_paddd_Vx_W);
8511/* Opcode 0xf2 0x0f 0xfe - invalid */
8512
8513
8514/** Opcode **** 0x0f 0xff - UD0 */
8515FNIEMOP_DEF(iemOp_ud0)
8516{
8517 IEMOP_MNEMONIC(ud0, "ud0");
8518 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8519 {
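        /* Intel CPUs decode a ModR/M byte (and any memory operand bytes)
           for ud0 before raising #UD, whereas AMD faults on the opcode
           alone; mirror the Intel behavior here. */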
8520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8521#ifndef TST_IEM_CHECK_MC
8522 RTGCPTR GCPtrEff;
8523 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8524 if (rcStrict != VINF_SUCCESS)
8525 return rcStrict;
8526#endif
8527 IEMOP_HLP_DONE_DECODING();
8528 }
8529 return IEMOP_RAISE_INVALID_OPCODE();
8530}
8531
8532
8533
8534/**
8535 * Two byte opcode map, first byte 0x0f.
8536 *
8537 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8538 * check if it needs updating as well when making changes.
8539 */
8540IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8541{
8542 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
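 /* Note: IEMOP_X4(a_Fn) expands to four identical column entries, i.e. the
    same handler is used regardless of the operand prefix. */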
8543 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8544 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8545 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8546 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8547 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8548 /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
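
/*
 * Illustrative sketch, not part of the original sources: each opcode byte
 * owns four consecutive slots in g_apfnTwoByteMap, one per mandatory-prefix
 * column (none, 0x66, 0xF3, 0xF2), which is what the 1024-entry assertion
 * above encodes (256 opcodes x 4 columns).  IEMOP_X4 simply repeats a single
 * handler across all four columns for opcodes that do not distinguish these
 * prefixes.  Assuming the decoder state tracks the active mandatory prefix
 * as an index in the 0..3 range (idxPrefix below is such an assumed field),
 * dispatch would look roughly like this:
 */
#if 0 /* sketch only, not built */
FNIEMOP_DEF(iemOp_TwoByteEscape_Sketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Row is the second opcode byte, column the mandatory prefix group. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif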

/** @} */