VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@66974

Last change on this file since 66974 was 66974, checked in by vboxsync, 8 years ago

IEM: Added docs and tests to movaps and movapd.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 323.2 KB
 
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66974 2017-05-19 12:02:17Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
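
/*
 * Decode sketch for the register form above (illustrative only; the encoding
 * is arbitrary): with REX.W, "48 0f 00 c0" is 'sldt rax' (ModR/M 0xc0: mod=3,
 * reg=0, rm=0) and takes the IEMMODE_64BIT path, zero-extending the 16-bit
 * LDTR selector into RAX.  The memory form always stores exactly 16 bits,
 * whatever the operand size.
 */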


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
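
/*
 * Worked example of the Group 6 dispatch above (illustrative only; the byte
 * value is arbitrary): for "0f 00 d8" the ModR/M byte 0xd8 gives mod=3,
 * reg=3, rm=0, so g_apfnGroup6[3] (iemOp_Grp6_ltr) gets called for 'ltr ax':
 *
 *     uint8_t  const bRm  = 0xd8;
 *     unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // 3 -> iemOp_Grp6_ltr
 *     unsigned const iRm  = bRm & X86_MODRM_RM_MASK;                            // 0 -> ax
 */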


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
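
/*
 * Numeric sketch of the masking above (illustrative values): with the low
 * CR0 word 0x0001 (PE set), a 286 target stores 0xfff0 | 0x0001 = 0xfff1,
 * a 386 stores 0xffe0 | 0x0001 = 0xffe1, and later CPUs store 0x0001
 * unchanged, mimicking how the reserved MSW bits read on each generation.
 */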


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
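
/*
 * Worked example of the mod=3 dispatch above (illustrative only): "0f 01 d0"
 * has ModR/M 0xd0 (mod=3, reg=2, rm=0) and lands on iemOp_Grp7_xgetbv, while
 * "0f 01 f8" (mod=3, reg=7, rm=0) lands on iemOp_Grp7_swapgs; any memory
 * encoding (mod != 3) goes through g_apfnGroup7Mem instead.
 */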

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
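
/*
 * A stand-alone C sketch of what the memory form above boils down to
 * (illustrative only; pvMem and pXmm are hypothetical stand-ins for the
 * guest address and the destination XMM register):
 *
 *     RTUINT128U uSrc;
 *     memcpy(&uSrc, pvMem, sizeof(uSrc));  // movups allows unaligned sources
 *     *pXmm = uSrc;                        // whole destination is replaced
 */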


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
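
/*
 * Note the asymmetry implemented above, matching the SDM: the register form
 * writes only the low dword and leaves bits 32-127 of the destination
 * untouched, while the memory form zero-extends.  E.g. with xmm1 all-ones,
 * 'movss xmm1, xmm2' keeps the upper 96 bits set, whereas
 * 'movss xmm1, [mem]' clears them.
 */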


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
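
/*
 * Encoding sketch (illustrative): the mod field alone selects the mnemonic
 * for 0f 12.  "0f 12 c1" (mod=3) decodes as 'movhlps xmm0, xmm1', copying
 * the high qword of xmm1 into the low qword of xmm0, while "0f 12 00"
 * (mod=0) decodes as 'movlps xmm0, qword [rax]' in 64-bit mode.
 */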


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
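
/*
 * A C sketch of the shuffle iemAImpl_movsldup performs (the helper's body
 * lives elsewhere; this is just the documented semantics):
 *
 *     puDst->au32[0] = puSrc->au32[0];
 *     puDst->au32[1] = puSrc->au32[0];
 *     puDst->au32[2] = puSrc->au32[2];
 *     puDst->au32[3] = puSrc->au32[2];
 *
 * which matches the @optest above: 0xdddddddd00000002eeeeeeee00000001
 * becomes 0x00000002000000020000000100000001.
 */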
1583
1584
1585/**
1586 * @opcode 0x12
1587 * @oppfx 0xf2
1588 * @opcpuid sse3
1589 * @opgroup og_sse3_pcksclr_datamove
1590 * @opxcpttype 5
1591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1592 * op1=0x22222222111111112222222211111111
1593 */
1594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1595{
1596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1599 {
1600 /*
1601 * Register, register.
1602 */
1603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604 IEM_MC_BEGIN(2, 0);
1605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1606 IEM_MC_ARG(uint64_t, uSrc, 1);
1607
1608 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1609 IEM_MC_PREPARE_SSE_USAGE();
1610
1611 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1612 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1613 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 else
1619 {
1620 /*
1621 * Register, memory.
1622 */
1623 IEM_MC_BEGIN(2, 2);
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1625 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1626 IEM_MC_ARG(uint64_t, uSrc, 1);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1631 IEM_MC_PREPARE_SSE_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1635 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1636
1637 IEM_MC_ADVANCE_RIP();
1638 IEM_MC_END();
1639 }
1640 return VINF_SUCCESS;
1641}
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}


/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
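
/*
 * Reference sketch of what iemAImpl_movshdup computes (matches the @optest
 * above; illustrative only):
 *
 *     puDst->au32[0] = puSrc->au32[1];
 *     puDst->au32[1] = puSrc->au32[1];
 *     puDst->au32[2] = puSrc->au32[3];
 *     puDst->au32[3] = puSrc->au32[3];
 */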

/**
 * @opdone
 * @opmnemonic  udf30f16
 * @opcode      0x16
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */


/**
 * @opcode      0x17
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x17
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opdone
 * @opmnemonic  udf30f17
 * @opcode      0x17
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f17
 * @opcode      0x17
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
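
/*
 * Example of the LOCK/CR8 special case above (AMD-style alternative
 * encoding; only accepted when fMovCr8In32Bit is set):
 *
 *     0f 20 c0       mov eax, cr0
 *     f0 0f 20 c0    mov eax, cr8   ; the LOCK prefix selects CR8
 */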


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x28
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 * @oponly
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
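
/*
 * Note: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the movaps/movapd
 * requirement of a 16-byte aligned memory operand, e.g. (illustrative):
 *
 *     movaps xmm0, [rsp]      ; fine if RSP is 16-byte aligned
 *     movaps xmm0, [rsp+8]    ; raises #GP(0) - misaligned
 */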

/**
 * @opcode      0x28
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 * @oponly
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/**
 * @opcode      0x29
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 * @oponly
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode      0x29
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 * @oponly
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT


/** Opcode 0x0f 0x2b - movntps Mps, Vps */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); //NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); //NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}


/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);


/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param   a_Cnd   The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
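
/*
 * Why the 32-bit cases above need an ELSE branch: in 64-bit mode a 32-bit
 * CMOVcc always writes its destination register, so the upper half is
 * zeroed even when the condition is false.  Rough equivalent (sketch):
 *
 *     if (fCond)
 *         uDst64 = u32Src;             // zero-extended move
 *     else
 *         uDst64 = (uint32_t)uDst64;   // high half cleared anyway
 */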



/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X

/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_STUB(iemOp_andps_Vps_Wps);
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_STUB(iemOp_orps_Vps_Wps);
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_STUB(iemOp_addps_Vps_Wps);
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_STUB(iemOp_addss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);

/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_STUB(iemOp_subps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_STUB(iemOp_subss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);

/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_STUB(iemOp_minps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_STUB(iemOp_minss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_STUB(iemOp_divps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_STUB(iemOp_divss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);

/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);

/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 128-bit aligned access of which only the low 64 bits are fetched.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx    mm1, mm2/mem32
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
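
/*
 * Interleave sketch for the low-low byte form (punpcklbw on MMX, using the
 * RTUINT64U byte view; the word/dword variants and the SSE forms follow the
 * same pattern with wider elements or registers).  Illustrative only:
 *
 *     for (int i = 3; i >= 0; i--)    // downwards so dst bytes 0..3 survive
 *     {
 *         uDst.au8[i * 2 + 1] = uSrc.au8[i];
 *         uDst.au8[i * 2]     = uDst.au8[i];
 *     }
 */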


/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
FNIEMOP_STUB(iemOp_packuswb_Vx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx    mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory
 * case means a 64-bit memory access for MMX, and for SSE a 128-bit aligned
 * access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory
 * case means a 128-bit aligned access where it may read the full 128 bits
 * or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
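
/*
 * High-high interleave sketch (punpckhdq on XMM as the example; only the
 * high halves of both operands contribute).  Illustrative only:
 *
 *     uDst.au32[0] = uDst.au32[2];
 *     uDst.au32[1] = uSrc.au32[2];
 *     uDst.au32[2] = uDst.au32[3];
 *     uDst.au32[3] = uSrc.au32[3];
 */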


/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}

/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}

/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
/* Opcode 0xf3 0x0f 0x6a - invalid */


/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */

/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}

/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */

/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}

/* Opcode 0xf3 0x0f 0x6d - invalid */


/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
    else
        IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* MMX, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* MMX, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
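
/*
 * The REX.W check above is what distinguishes the two encodings; e.g. in
 * 64-bit mode (illustrative):
 *
 *     0f 6e c8       movd mm1, eax   ; 32-bit source, zero-extended
 *     48 0f 6e c8    movq mm1, rax   ; 64-bit source
 */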

/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
    else
        IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* XMM, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* XMM, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x6e - invalid */


/** Opcode 0x0f 0x6f - movq Pq, Qq */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
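
/*
 * The only difference from movdqa above: movdqu fetches with the unaligned
 * IEM_MC_FETCH_MEM_U128, while movdqa's IEM_MC_FETCH_MEM_U128_ALIGN_SSE
 * raises #GP(0) for operands that are not 16-byte aligned.  E.g.:
 *
 *     movdqa xmm0, [rbx+1]    ; #GP(0) unless rbx+1 is 16-byte aligned
 *     movdqu xmm0, [rbx+1]    ; always fine
 */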
3674
3675
3676/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3677FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3678{
3679 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3682 {
3683 /*
3684 * Register, register.
3685 */
3686 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3688
3689 IEM_MC_BEGIN(3, 0);
3690 IEM_MC_ARG(uint64_t *, pDst, 0);
3691 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3692 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3693 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3694 IEM_MC_PREPARE_FPU_USAGE();
3695 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3696 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3697 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3698 IEM_MC_ADVANCE_RIP();
3699 IEM_MC_END();
3700 }
3701 else
3702 {
3703 /*
3704 * Register, memory.
3705 */
3706 IEM_MC_BEGIN(3, 2);
3707 IEM_MC_ARG(uint64_t *, pDst, 0);
3708 IEM_MC_LOCAL(uint64_t, uSrc);
3709 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3711
3712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3713 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3714 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3716 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3717
3718 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3719 IEM_MC_PREPARE_FPU_USAGE();
3720 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3721 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3722
3723 IEM_MC_ADVANCE_RIP();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
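/*
 * Editor's note: a reference sketch (hypothetical name, not the emulator's
 * implementation) of the word shuffle iemAImpl_pshufw performs above: result
 * word i is source word ((bEvil >> (2 * i)) & 3).
 */
#include <stdint.h>

static uint64_t iemSketchPshufW(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uDst = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bImm >> (i * 2)) & 3;                    /* 2-bit source word index */
        uDst |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (i * 16); /* place the selected word */
    }
    return uDst;
}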
3728
3729/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3730FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3731{
3732 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3734 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3735 {
3736 /*
3737 * Register, register.
3738 */
3739 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741
3742 IEM_MC_BEGIN(3, 0);
3743 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3744 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3745 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3746 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3747 IEM_MC_PREPARE_SSE_USAGE();
3748 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3749 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3750 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 }
3754 else
3755 {
3756 /*
3757 * Register, memory.
3758 */
3759 IEM_MC_BEGIN(3, 2);
3760 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3761 IEM_MC_LOCAL(RTUINT128U, uSrc);
3762 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3764
3765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3766 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3767 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3769 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3770
3771 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3772 IEM_MC_PREPARE_SSE_USAGE();
3773 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3774 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3775
3776 IEM_MC_ADVANCE_RIP();
3777 IEM_MC_END();
3778 }
3779 return VINF_SUCCESS;
3780}
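/*
 * Editor's note: the 32-bit analogue for the pshufd form above, as a
 * standalone sketch with a hypothetical name.  The temporary copy mirrors the
 * fact that destination and source may be the same XMM register.
 */
#include <stdint.h>

static void iemSketchPshufD(uint32_t aDst[4], const uint32_t aSrc[4], uint8_t bImm)
{
    uint32_t const aTmp[4] = { aSrc[0], aSrc[1], aSrc[2], aSrc[3] }; /* tolerate aDst == aSrc */
    for (unsigned i = 0; i < 4; i++)
        aDst[i] = aTmp[(bImm >> (i * 2)) & 3];                       /* imm8 selects each dword */
}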
3781
3782/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3783FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3784{
3785 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3788 {
3789 /*
3790 * Register, register.
3791 */
3792 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794
3795 IEM_MC_BEGIN(3, 0);
3796 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3797 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3799 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3800 IEM_MC_PREPARE_SSE_USAGE();
3801 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3802 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3803 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3804 IEM_MC_ADVANCE_RIP();
3805 IEM_MC_END();
3806 }
3807 else
3808 {
3809 /*
3810 * Register, memory.
3811 */
3812 IEM_MC_BEGIN(3, 2);
3813 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3814 IEM_MC_LOCAL(RTUINT128U, uSrc);
3815 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3817
3818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3819 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3820 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3822 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3823
3824 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3825 IEM_MC_PREPARE_SSE_USAGE();
3826 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3827 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3828
3829 IEM_MC_ADVANCE_RIP();
3830 IEM_MC_END();
3831 }
3832 return VINF_SUCCESS;
3833}
3834
3835/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3836FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3837{
3838 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3841 {
3842 /*
3843 * Register, register.
3844 */
3845 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847
3848 IEM_MC_BEGIN(3, 0);
3849 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3850 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3851 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3852 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3853 IEM_MC_PREPARE_SSE_USAGE();
3854 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3855 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3856 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3857 IEM_MC_ADVANCE_RIP();
3858 IEM_MC_END();
3859 }
3860 else
3861 {
3862 /*
3863 * Register, memory.
3864 */
3865 IEM_MC_BEGIN(3, 2);
3866 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3867 IEM_MC_LOCAL(RTUINT128U, uSrc);
3868 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3870
3871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3872 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3873 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3876
3877 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3878 IEM_MC_PREPARE_SSE_USAGE();
3879 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3880 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3881
3882 IEM_MC_ADVANCE_RIP();
3883 IEM_MC_END();
3884 }
3885 return VINF_SUCCESS;
3886}
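/*
 * Editor's note: sketch (hypothetical name) of the pshuflw semantics used
 * above: the imm8 shuffles the low four words while the high quadword passes
 * through unchanged.  pshufhw mirrors this, shuffling words 4..7 instead.
 */
#include <stdint.h>
#include <string.h>

static void iemSketchPshufLW(uint16_t aDst[8], const uint16_t aSrc[8], uint8_t bImm)
{
    uint16_t aTmp[8];
    memcpy(aTmp, aSrc, sizeof(aTmp));               /* tolerate aDst == aSrc */
    for (unsigned i = 0; i < 4; i++)
        aDst[i] = aTmp[(bImm >> (i * 2)) & 3];      /* shuffle the low four words */
    for (unsigned i = 4; i < 8; i++)
        aDst[i] = aTmp[i];                          /* high quadword is copied as-is */
}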
3887
3888
3889/** Opcode 0x0f 0x71 11/2. */
3890FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3891
3892/** Opcode 0x66 0x0f 0x71 11/2. */
3893FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3894
3895/** Opcode 0x0f 0x71 11/4. */
3896FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3897
3898/** Opcode 0x66 0x0f 0x71 11/4. */
3899FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3900
3901/** Opcode 0x0f 0x71 11/6. */
3902FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3903
3904/** Opcode 0x66 0x0f 0x71 11/6. */
3905FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3906
3907
3908/**
3909 * Group 12 jump table for the register variant.
3910 */
3911IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3912{
3913 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3914 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3915 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3916 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3917 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3918 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3919 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3920 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3921};
3922AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3923
3924
3925/** Opcode 0x0f 0x71. */
3926FNIEMOP_DEF(iemOp_Grp12)
3927{
3928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3930 /* register, register */
3931 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3932 + pVCpu->iem.s.idxPrefix], bRm);
3933 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3934}
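/*
 * Editor's note: a sketch of the flattened lookup the dispatcher above (and
 * its group 13/14 siblings) performs.  Each /r row of the table holds four
 * entries in mandatory-prefix order (none, 0x66, 0xf3, 0xf2), which is the
 * encoding iem keeps in idxPrefix; the helper name is hypothetical.
 */
#include <stdint.h>

static unsigned iemSketchGroupTableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> 3) & 7;   /* ModR/M reg field picks the row    */
    return iReg * 4 + idxPrefix;            /* prefix slot picks the column 0..3 */
}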
3935
3936
3937/** Opcode 0x0f 0x72 11/2. */
3938FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3939
3940/** Opcode 0x66 0x0f 0x72 11/2. */
3941FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3942
3943/** Opcode 0x0f 0x72 11/4. */
3944FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3945
3946/** Opcode 0x66 0x0f 0x72 11/4. */
3947FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3948
3949/** Opcode 0x0f 0x72 11/6. */
3950FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3951
3952/** Opcode 0x66 0x0f 0x72 11/6. */
3953FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3954
3955
3956/**
3957 * Group 13 jump table for the register variant.
3958 */
3959IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3960{
3961 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3962 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3963 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3964 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3965 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3966 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3967 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3968 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3969};
3970AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3971
3972/** Opcode 0x0f 0x72. */
3973FNIEMOP_DEF(iemOp_Grp13)
3974{
3975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3977 /* register, register */
3978 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3979 + pVCpu->iem.s.idxPrefix], bRm);
3980 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3981}
3982
3983
3984/** Opcode 0x0f 0x73 11/2. */
3985FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3986
3987/** Opcode 0x66 0x0f 0x73 11/2. */
3988FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3989
3990/** Opcode 0x66 0x0f 0x73 11/3. */
3991FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3992
3993/** Opcode 0x0f 0x73 11/6. */
3994FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3995
3996/** Opcode 0x66 0x0f 0x73 11/6. */
3997FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3998
3999/** Opcode 0x66 0x0f 0x73 11/7. */
4000FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4001
4002/**
4003 * Group 14 jump table for the register variant.
4004 */
4005IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4006{
4007 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4008 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4009 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4010 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4011 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4012 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4013 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4014 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4015};
4016AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4017
4018
4019/** Opcode 0x0f 0x73. */
4020FNIEMOP_DEF(iemOp_Grp14)
4021{
4022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4023 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4024 /* register, register */
4025 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4026 + pVCpu->iem.s.idxPrefix], bRm);
4027 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4028}
4029
4030
4031/**
4032 * Common worker for MMX instructions of the form:
4033 * pxxx mm1, mm2/mem64
4034 */
4035FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4036{
4037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4039 {
4040 /*
4041 * Register, register.
4042 */
4043 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4044 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4046 IEM_MC_BEGIN(2, 0);
4047 IEM_MC_ARG(uint64_t *, pDst, 0);
4048 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4049 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4050 IEM_MC_PREPARE_FPU_USAGE();
4051 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4052 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4053 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4054 IEM_MC_ADVANCE_RIP();
4055 IEM_MC_END();
4056 }
4057 else
4058 {
4059 /*
4060 * Register, memory.
4061 */
4062 IEM_MC_BEGIN(2, 2);
4063 IEM_MC_ARG(uint64_t *, pDst, 0);
4064 IEM_MC_LOCAL(uint64_t, uSrc);
4065 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4067
4068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4070 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4071 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4072
4073 IEM_MC_PREPARE_FPU_USAGE();
4074 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4075 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4076
4077 IEM_MC_ADVANCE_RIP();
4078 IEM_MC_END();
4079 }
4080 return VINF_SUCCESS;
4081}
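/*
 * Editor's note: a reference sketch of the kind of helper the worker above
 * invokes through pImpl->pfnU64, using pcmpeqb semantics as the example: each
 * byte of the destination becomes 0xff on equality, 0x00 otherwise.  The name
 * is hypothetical and the real helpers may take additional state.
 */
#include <stdint.h>

static void iemSketchPcmpeqbU64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t uDst = 0;
    for (unsigned i = 0; i < 8; i++)
    {
        uint8_t const bDst = (uint8_t)(*puDst >> (i * 8));
        uint8_t const bSrc = (uint8_t)(*puSrc >> (i * 8));
        uDst |= (uint64_t)(bDst == bSrc ? 0xff : 0x00) << (i * 8); /* all ones on match */
    }
    *puDst = uDst;
}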
4082
4083
4084/**
4085 * Common worker for SSE2 instructions of the form:
4086 * pxxx xmm1, xmm2/mem128
4087 *
4088 * Proper alignment of the 128-bit operand is enforced.
4089 * Exceptions type 4. SSE2 cpuid checks.
4090 */
4091FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4092{
4093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4095 {
4096 /*
4097 * Register, register.
4098 */
4099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4100 IEM_MC_BEGIN(2, 0);
4101 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4102 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4103 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4104 IEM_MC_PREPARE_SSE_USAGE();
4105 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4106 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4107 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4108 IEM_MC_ADVANCE_RIP();
4109 IEM_MC_END();
4110 }
4111 else
4112 {
4113 /*
4114 * Register, memory.
4115 */
4116 IEM_MC_BEGIN(2, 2);
4117 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4118 IEM_MC_LOCAL(RTUINT128U, uSrc);
4119 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4121
4122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4124 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4125 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4126
4127 IEM_MC_PREPARE_SSE_USAGE();
4128 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4129 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4130
4131 IEM_MC_ADVANCE_RIP();
4132 IEM_MC_END();
4133 }
4134 return VINF_SUCCESS;
4135}
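/*
 * Editor's note: both workers take a PCIEMOPMEDIAF2, which pairs the 64-bit
 * (MMX) and 128-bit (SSE2) helpers of one instruction.  A standalone analogue
 * of that shape, under the assumption that only the two function pointers
 * matter for dispatch (the real IEM type may carry more):
 */
#include <stdint.h>

typedef struct IEMSKETCHMEDIAF2
{
    void (*pfnU64)(uint64_t *puDst, uint64_t const *puSrc);    /* used by iemOpCommonMmx_FullFull_To_Full  */
    void (*pfnU128)(uint64_t *pauDst, uint64_t const *pauSrc); /* used by iemOpCommonSse2_FullFull_To_Full */
} IEMSKETCHMEDIAF2;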
4136
4137
4138/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4139FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4140{
4141 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4142 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4143}
4144
4145/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4146FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4147{
4148 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4149 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4150}
4151
4152/* Opcode 0xf3 0x0f 0x74 - invalid */
4153/* Opcode 0xf2 0x0f 0x74 - invalid */
4154
4155
4156/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4157FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4158{
4159 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4160 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4161}
4162
4163/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4164FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4165{
4166 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4167 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4168}
4169
4170/* Opcode 0xf3 0x0f 0x75 - invalid */
4171/* Opcode 0xf2 0x0f 0x75 - invalid */
4172
4173
4174/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4175FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4176{
4177 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4178 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4179}
4180
4181/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4182FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4183{
4184 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4185 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4186}
4187
4188/* Opcode 0xf3 0x0f 0x76 - invalid */
4189/* Opcode 0xf2 0x0f 0x76 - invalid */
4190
4191
4192/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4193FNIEMOP_STUB(iemOp_emms);
4194/* Opcode 0x66 0x0f 0x77 - invalid */
4195/* Opcode 0xf3 0x0f 0x77 - invalid */
4196/* Opcode 0xf2 0x0f 0x77 - invalid */
4197
4198/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4199FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4200/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4201FNIEMOP_STUB(iemOp_AmdGrp17);
4202/* Opcode 0xf3 0x0f 0x78 - invalid */
4203/* Opcode 0xf2 0x0f 0x78 - invalid */
4204
4205/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4206FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4207/* Opcode 0x66 0x0f 0x79 - invalid */
4208/* Opcode 0xf3 0x0f 0x79 - invalid */
4209/* Opcode 0xf2 0x0f 0x79 - invalid */
4210
4211/* Opcode 0x0f 0x7a - invalid */
4212/* Opcode 0x66 0x0f 0x7a - invalid */
4213/* Opcode 0xf3 0x0f 0x7a - invalid */
4214/* Opcode 0xf2 0x0f 0x7a - invalid */
4215
4216/* Opcode 0x0f 0x7b - invalid */
4217/* Opcode 0x66 0x0f 0x7b - invalid */
4218/* Opcode 0xf3 0x0f 0x7b - invalid */
4219/* Opcode 0xf2 0x0f 0x7b - invalid */
4220
4221/* Opcode 0x0f 0x7c - invalid */
4222/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4223FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4224/* Opcode 0xf3 0x0f 0x7c - invalid */
4225/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4226FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4227
4228/* Opcode 0x0f 0x7d - invalid */
4229/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4230FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4231/* Opcode 0xf3 0x0f 0x7d - invalid */
4232/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4233FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4234
4235
4236/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4237FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4238{
4239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4240 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4241 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4242 else
4243 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4245 {
4246 /* greg, MMX */
4247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4248 IEM_MC_BEGIN(0, 1);
4249 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4250 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4251 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4252 {
4253 IEM_MC_LOCAL(uint64_t, u64Tmp);
4254 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4255 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4256 }
4257 else
4258 {
4259 IEM_MC_LOCAL(uint32_t, u32Tmp);
4260 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4261 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4262 }
4263 IEM_MC_ADVANCE_RIP();
4264 IEM_MC_END();
4265 }
4266 else
4267 {
4268 /* [mem], MMX */
4269 IEM_MC_BEGIN(0, 2);
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4273 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4274 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4275 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4276 {
4277 IEM_MC_LOCAL(uint64_t, u64Tmp);
4278 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4279 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4280 }
4281 else
4282 {
4283 IEM_MC_LOCAL(uint32_t, u32Tmp);
4284 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4285 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4286 }
4287 IEM_MC_ADVANCE_RIP();
4288 IEM_MC_END();
4289 }
4290 return VINF_SUCCESS;
4291}
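/*
 * Editor's note: sketch of the width selection done twice above: REX.W turns
 * the 0x0f 0x7e store into a full 64-bit move, otherwise only the low 32 bits
 * of the MMX register are written (zero-extended when the target is a 64-bit
 * general register).  The helper name is hypothetical.
 */
#include <stdint.h>

static uint64_t iemSketchMovdMovqValue(uint64_t uMmxReg, int fRexW)
{
    return fRexW ? uMmxReg                      /* movq Eq,Pq: whole register   */
                 : (uint64_t)(uint32_t)uMmxReg; /* movd Ed,Pd: low dword, zx'ed */
}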
4292
4293/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4294FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4295{
4296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4297 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4298 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4299 else
4300 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4302 {
4303 /* greg, XMM */
4304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4305 IEM_MC_BEGIN(0, 1);
4306 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4307 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4308 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4309 {
4310 IEM_MC_LOCAL(uint64_t, u64Tmp);
4311 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4312 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4313 }
4314 else
4315 {
4316 IEM_MC_LOCAL(uint32_t, u32Tmp);
4317 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4318 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4319 }
4320 IEM_MC_ADVANCE_RIP();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 /* [mem], XMM */
4326 IEM_MC_BEGIN(0, 2);
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4331 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4332 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4333 {
4334 IEM_MC_LOCAL(uint64_t, u64Tmp);
4335 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4336 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4337 }
4338 else
4339 {
4340 IEM_MC_LOCAL(uint32_t, u32Tmp);
4341 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4342 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4343 }
4344 IEM_MC_ADVANCE_RIP();
4345 IEM_MC_END();
4346 }
4347 return VINF_SUCCESS;
4348}
4349
4350
4351/**
4352 * @opcode 0x7e
4353 * @opcodesub !11 mr/reg
4354 * @oppfx 0xf3
4355 * @opcpuid sse2
4356 * @opgroup og_sse2_pcksclr_datamove
4357 * @opxcpttype 5
4358 * @optest op1=1 op2=2 -> op1=2
4359 * @optest op1=0 op2=-42 -> op1=-42
4360 */
4361FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4362{
4363 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
4364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4365 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4366 {
4367 /*
4368 * Register, register.
4369 */
4370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4371 IEM_MC_BEGIN(0, 2);
4372 IEM_MC_LOCAL(uint64_t, uSrc);
4373
4374 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4375 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4376
4377 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4378 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4379
4380 IEM_MC_ADVANCE_RIP();
4381 IEM_MC_END();
4382 }
4383 else
4384 {
4385 /*
4386 * Register, memory.
4387 */
4388 IEM_MC_BEGIN(0, 2);
4389 IEM_MC_LOCAL(uint64_t, uSrc);
4390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4391
4392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4394 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4395 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4396
4397 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4398 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4399
4400 IEM_MC_ADVANCE_RIP();
4401 IEM_MC_END();
4402 }
4403 return VINF_SUCCESS;
4404}
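/*
 * Editor's note: sketch of the IEM_MC_STORE_XREG_U64_ZX_U128 effect both
 * paths above rely on: the low quadword of the destination XMM register is
 * written and the high quadword is cleared, which is what the @optest lines
 * describe.  The 128-bit stand-in type and the name are hypothetical.
 */
#include <stdint.h>

typedef struct IEMSKETCHU128 { uint64_t au64[2]; } IEMSKETCHU128;

static void iemSketchStoreU64ZxU128(IEMSKETCHU128 *pDst, uint64_t uSrc)
{
    pDst->au64[0] = uSrc; /* low qword  = source value   */
    pDst->au64[1] = 0;    /* high qword = zero extension */
}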
4405
4406/* Opcode 0xf2 0x0f 0x7e - invalid */
4407
4408
4409/** Opcode 0x0f 0x7f - movq Qq, Pq */
4410FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4411{
4412 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4414 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4415 {
4416 /*
4417 * Register, register.
4418 */
4419 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4420 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4422 IEM_MC_BEGIN(0, 1);
4423 IEM_MC_LOCAL(uint64_t, u64Tmp);
4424 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4425 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4426 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4427 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4428 IEM_MC_ADVANCE_RIP();
4429 IEM_MC_END();
4430 }
4431 else
4432 {
4433 /*
4434 * Register, memory.
4435 */
4436 IEM_MC_BEGIN(0, 2);
4437 IEM_MC_LOCAL(uint64_t, u64Tmp);
4438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4439
4440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4442 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4443 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4444
4445 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4446 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4447
4448 IEM_MC_ADVANCE_RIP();
4449 IEM_MC_END();
4450 }
4451 return VINF_SUCCESS;
4452}
4453
4454/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4455FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4456{
4457 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4459 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4460 {
4461 /*
4462 * Register, register.
4463 */
4464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4465 IEM_MC_BEGIN(0, 0);
4466 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4467 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4468 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4469 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4470 IEM_MC_ADVANCE_RIP();
4471 IEM_MC_END();
4472 }
4473 else
4474 {
4475 /*
4476 * Register, memory.
4477 */
4478 IEM_MC_BEGIN(0, 2);
4479 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4481
4482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4484 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4485 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4486
4487 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4488 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4489
4490 IEM_MC_ADVANCE_RIP();
4491 IEM_MC_END();
4492 }
4493 return VINF_SUCCESS;
4494}
4495
4496/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4497FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4498{
4499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4500 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4502 {
4503 /*
4504 * Register, register.
4505 */
4506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4507 IEM_MC_BEGIN(0, 0);
4508 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4509 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4510 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4511 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4512 IEM_MC_ADVANCE_RIP();
4513 IEM_MC_END();
4514 }
4515 else
4516 {
4517 /*
4518 * Register, memory.
4519 */
4520 IEM_MC_BEGIN(0, 2);
4521 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4523
4524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4526 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4528
4529 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4530 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4531
4532 IEM_MC_ADVANCE_RIP();
4533 IEM_MC_END();
4534 }
4535 return VINF_SUCCESS;
4536}
4537
4538/* Opcode 0xf2 0x0f 0x7f - invalid */
4539
4540
4541
4542/** Opcode 0x0f 0x80. */
4543FNIEMOP_DEF(iemOp_jo_Jv)
4544{
4545 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4546 IEMOP_HLP_MIN_386();
4547 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4548 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4549 {
4550 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4552
4553 IEM_MC_BEGIN(0, 0);
4554 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4555 IEM_MC_REL_JMP_S16(i16Imm);
4556 } IEM_MC_ELSE() {
4557 IEM_MC_ADVANCE_RIP();
4558 } IEM_MC_ENDIF();
4559 IEM_MC_END();
4560 }
4561 else
4562 {
4563 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4565
4566 IEM_MC_BEGIN(0, 0);
4567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4568 IEM_MC_REL_JMP_S32(i32Imm);
4569 } IEM_MC_ELSE() {
4570 IEM_MC_ADVANCE_RIP();
4571 } IEM_MC_ENDIF();
4572 IEM_MC_END();
4573 }
4574 return VINF_SUCCESS;
4575}
4576
4577
4578/** Opcode 0x0f 0x81. */
4579FNIEMOP_DEF(iemOp_jno_Jv)
4580{
4581 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4582 IEMOP_HLP_MIN_386();
4583 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4584 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4585 {
4586 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588
4589 IEM_MC_BEGIN(0, 0);
4590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4591 IEM_MC_ADVANCE_RIP();
4592 } IEM_MC_ELSE() {
4593 IEM_MC_REL_JMP_S16(i16Imm);
4594 } IEM_MC_ENDIF();
4595 IEM_MC_END();
4596 }
4597 else
4598 {
4599 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4601
4602 IEM_MC_BEGIN(0, 0);
4603 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4604 IEM_MC_ADVANCE_RIP();
4605 } IEM_MC_ELSE() {
4606 IEM_MC_REL_JMP_S32(i32Imm);
4607 } IEM_MC_ENDIF();
4608 IEM_MC_END();
4609 }
4610 return VINF_SUCCESS;
4611}
4612
4613
4614/** Opcode 0x0f 0x82. */
4615FNIEMOP_DEF(iemOp_jc_Jv)
4616{
4617 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4618 IEMOP_HLP_MIN_386();
4619 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4620 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4621 {
4622 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624
4625 IEM_MC_BEGIN(0, 0);
4626 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4627 IEM_MC_REL_JMP_S16(i16Imm);
4628 } IEM_MC_ELSE() {
4629 IEM_MC_ADVANCE_RIP();
4630 } IEM_MC_ENDIF();
4631 IEM_MC_END();
4632 }
4633 else
4634 {
4635 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4637
4638 IEM_MC_BEGIN(0, 0);
4639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4640 IEM_MC_REL_JMP_S32(i32Imm);
4641 } IEM_MC_ELSE() {
4642 IEM_MC_ADVANCE_RIP();
4643 } IEM_MC_ENDIF();
4644 IEM_MC_END();
4645 }
4646 return VINF_SUCCESS;
4647}
4648
4649
4650/** Opcode 0x0f 0x83. */
4651FNIEMOP_DEF(iemOp_jnc_Jv)
4652{
4653 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4654 IEMOP_HLP_MIN_386();
4655 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4656 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4657 {
4658 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660
4661 IEM_MC_BEGIN(0, 0);
4662 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4663 IEM_MC_ADVANCE_RIP();
4664 } IEM_MC_ELSE() {
4665 IEM_MC_REL_JMP_S16(i16Imm);
4666 } IEM_MC_ENDIF();
4667 IEM_MC_END();
4668 }
4669 else
4670 {
4671 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4673
4674 IEM_MC_BEGIN(0, 0);
4675 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4676 IEM_MC_ADVANCE_RIP();
4677 } IEM_MC_ELSE() {
4678 IEM_MC_REL_JMP_S32(i32Imm);
4679 } IEM_MC_ENDIF();
4680 IEM_MC_END();
4681 }
4682 return VINF_SUCCESS;
4683}
4684
4685
4686/** Opcode 0x0f 0x84. */
4687FNIEMOP_DEF(iemOp_je_Jv)
4688{
4689 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4690 IEMOP_HLP_MIN_386();
4691 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4692 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4693 {
4694 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4696
4697 IEM_MC_BEGIN(0, 0);
4698 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4699 IEM_MC_REL_JMP_S16(i16Imm);
4700 } IEM_MC_ELSE() {
4701 IEM_MC_ADVANCE_RIP();
4702 } IEM_MC_ENDIF();
4703 IEM_MC_END();
4704 }
4705 else
4706 {
4707 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4709
4710 IEM_MC_BEGIN(0, 0);
4711 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4712 IEM_MC_REL_JMP_S32(i32Imm);
4713 } IEM_MC_ELSE() {
4714 IEM_MC_ADVANCE_RIP();
4715 } IEM_MC_ENDIF();
4716 IEM_MC_END();
4717 }
4718 return VINF_SUCCESS;
4719}
4720
4721
4722/** Opcode 0x0f 0x85. */
4723FNIEMOP_DEF(iemOp_jne_Jv)
4724{
4725 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4726 IEMOP_HLP_MIN_386();
4727 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4728 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4729 {
4730 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4732
4733 IEM_MC_BEGIN(0, 0);
4734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4735 IEM_MC_ADVANCE_RIP();
4736 } IEM_MC_ELSE() {
4737 IEM_MC_REL_JMP_S16(i16Imm);
4738 } IEM_MC_ENDIF();
4739 IEM_MC_END();
4740 }
4741 else
4742 {
4743 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4745
4746 IEM_MC_BEGIN(0, 0);
4747 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4748 IEM_MC_ADVANCE_RIP();
4749 } IEM_MC_ELSE() {
4750 IEM_MC_REL_JMP_S32(i32Imm);
4751 } IEM_MC_ENDIF();
4752 IEM_MC_END();
4753 }
4754 return VINF_SUCCESS;
4755}
4756
4757
4758/** Opcode 0x0f 0x86. */
4759FNIEMOP_DEF(iemOp_jbe_Jv)
4760{
4761 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4762 IEMOP_HLP_MIN_386();
4763 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4764 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4765 {
4766 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768
4769 IEM_MC_BEGIN(0, 0);
4770 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4771 IEM_MC_REL_JMP_S16(i16Imm);
4772 } IEM_MC_ELSE() {
4773 IEM_MC_ADVANCE_RIP();
4774 } IEM_MC_ENDIF();
4775 IEM_MC_END();
4776 }
4777 else
4778 {
4779 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4781
4782 IEM_MC_BEGIN(0, 0);
4783 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4784 IEM_MC_REL_JMP_S32(i32Imm);
4785 } IEM_MC_ELSE() {
4786 IEM_MC_ADVANCE_RIP();
4787 } IEM_MC_ENDIF();
4788 IEM_MC_END();
4789 }
4790 return VINF_SUCCESS;
4791}
4792
4793
4794/** Opcode 0x0f 0x87. */
4795FNIEMOP_DEF(iemOp_jnbe_Jv)
4796{
4797 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4798 IEMOP_HLP_MIN_386();
4799 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4800 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4801 {
4802 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804
4805 IEM_MC_BEGIN(0, 0);
4806 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4807 IEM_MC_ADVANCE_RIP();
4808 } IEM_MC_ELSE() {
4809 IEM_MC_REL_JMP_S16(i16Imm);
4810 } IEM_MC_ENDIF();
4811 IEM_MC_END();
4812 }
4813 else
4814 {
4815 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4817
4818 IEM_MC_BEGIN(0, 0);
4819 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4820 IEM_MC_ADVANCE_RIP();
4821 } IEM_MC_ELSE() {
4822 IEM_MC_REL_JMP_S32(i32Imm);
4823 } IEM_MC_ENDIF();
4824 IEM_MC_END();
4825 }
4826 return VINF_SUCCESS;
4827}
4828
4829
4830/** Opcode 0x0f 0x88. */
4831FNIEMOP_DEF(iemOp_js_Jv)
4832{
4833 IEMOP_MNEMONIC(js_Jv, "js Jv");
4834 IEMOP_HLP_MIN_386();
4835 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4836 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4837 {
4838 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840
4841 IEM_MC_BEGIN(0, 0);
4842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4843 IEM_MC_REL_JMP_S16(i16Imm);
4844 } IEM_MC_ELSE() {
4845 IEM_MC_ADVANCE_RIP();
4846 } IEM_MC_ENDIF();
4847 IEM_MC_END();
4848 }
4849 else
4850 {
4851 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4853
4854 IEM_MC_BEGIN(0, 0);
4855 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4856 IEM_MC_REL_JMP_S32(i32Imm);
4857 } IEM_MC_ELSE() {
4858 IEM_MC_ADVANCE_RIP();
4859 } IEM_MC_ENDIF();
4860 IEM_MC_END();
4861 }
4862 return VINF_SUCCESS;
4863}
4864
4865
4866/** Opcode 0x0f 0x89. */
4867FNIEMOP_DEF(iemOp_jns_Jv)
4868{
4869 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4870 IEMOP_HLP_MIN_386();
4871 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4872 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4873 {
4874 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4876
4877 IEM_MC_BEGIN(0, 0);
4878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4879 IEM_MC_ADVANCE_RIP();
4880 } IEM_MC_ELSE() {
4881 IEM_MC_REL_JMP_S16(i16Imm);
4882 } IEM_MC_ENDIF();
4883 IEM_MC_END();
4884 }
4885 else
4886 {
4887 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4889
4890 IEM_MC_BEGIN(0, 0);
4891 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4892 IEM_MC_ADVANCE_RIP();
4893 } IEM_MC_ELSE() {
4894 IEM_MC_REL_JMP_S32(i32Imm);
4895 } IEM_MC_ENDIF();
4896 IEM_MC_END();
4897 }
4898 return VINF_SUCCESS;
4899}
4900
4901
4902/** Opcode 0x0f 0x8a. */
4903FNIEMOP_DEF(iemOp_jp_Jv)
4904{
4905 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4906 IEMOP_HLP_MIN_386();
4907 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4908 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4909 {
4910 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4912
4913 IEM_MC_BEGIN(0, 0);
4914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4915 IEM_MC_REL_JMP_S16(i16Imm);
4916 } IEM_MC_ELSE() {
4917 IEM_MC_ADVANCE_RIP();
4918 } IEM_MC_ENDIF();
4919 IEM_MC_END();
4920 }
4921 else
4922 {
4923 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4925
4926 IEM_MC_BEGIN(0, 0);
4927 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4928 IEM_MC_REL_JMP_S32(i32Imm);
4929 } IEM_MC_ELSE() {
4930 IEM_MC_ADVANCE_RIP();
4931 } IEM_MC_ENDIF();
4932 IEM_MC_END();
4933 }
4934 return VINF_SUCCESS;
4935}
4936
4937
4938/** Opcode 0x0f 0x8b. */
4939FNIEMOP_DEF(iemOp_jnp_Jv)
4940{
4941 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4942 IEMOP_HLP_MIN_386();
4943 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4944 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4945 {
4946 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4948
4949 IEM_MC_BEGIN(0, 0);
4950 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4951 IEM_MC_ADVANCE_RIP();
4952 } IEM_MC_ELSE() {
4953 IEM_MC_REL_JMP_S16(i16Imm);
4954 } IEM_MC_ENDIF();
4955 IEM_MC_END();
4956 }
4957 else
4958 {
4959 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4961
4962 IEM_MC_BEGIN(0, 0);
4963 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4964 IEM_MC_ADVANCE_RIP();
4965 } IEM_MC_ELSE() {
4966 IEM_MC_REL_JMP_S32(i32Imm);
4967 } IEM_MC_ENDIF();
4968 IEM_MC_END();
4969 }
4970 return VINF_SUCCESS;
4971}
4972
4973
4974/** Opcode 0x0f 0x8c. */
4975FNIEMOP_DEF(iemOp_jl_Jv)
4976{
4977 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4978 IEMOP_HLP_MIN_386();
4979 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4980 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4981 {
4982 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4984
4985 IEM_MC_BEGIN(0, 0);
4986 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4987 IEM_MC_REL_JMP_S16(i16Imm);
4988 } IEM_MC_ELSE() {
4989 IEM_MC_ADVANCE_RIP();
4990 } IEM_MC_ENDIF();
4991 IEM_MC_END();
4992 }
4993 else
4994 {
4995 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997
4998 IEM_MC_BEGIN(0, 0);
4999 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5000 IEM_MC_REL_JMP_S32(i32Imm);
5001 } IEM_MC_ELSE() {
5002 IEM_MC_ADVANCE_RIP();
5003 } IEM_MC_ENDIF();
5004 IEM_MC_END();
5005 }
5006 return VINF_SUCCESS;
5007}
5008
5009
5010/** Opcode 0x0f 0x8d. */
5011FNIEMOP_DEF(iemOp_jnl_Jv)
5012{
5013 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5014 IEMOP_HLP_MIN_386();
5015 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5016 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5017 {
5018 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5020
5021 IEM_MC_BEGIN(0, 0);
5022 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5023 IEM_MC_ADVANCE_RIP();
5024 } IEM_MC_ELSE() {
5025 IEM_MC_REL_JMP_S16(i16Imm);
5026 } IEM_MC_ENDIF();
5027 IEM_MC_END();
5028 }
5029 else
5030 {
5031 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5033
5034 IEM_MC_BEGIN(0, 0);
5035 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5036 IEM_MC_ADVANCE_RIP();
5037 } IEM_MC_ELSE() {
5038 IEM_MC_REL_JMP_S32(i32Imm);
5039 } IEM_MC_ENDIF();
5040 IEM_MC_END();
5041 }
5042 return VINF_SUCCESS;
5043}
5044
5045
5046/** Opcode 0x0f 0x8e. */
5047FNIEMOP_DEF(iemOp_jle_Jv)
5048{
5049 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5050 IEMOP_HLP_MIN_386();
5051 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5052 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5053 {
5054 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5056
5057 IEM_MC_BEGIN(0, 0);
5058 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5059 IEM_MC_REL_JMP_S16(i16Imm);
5060 } IEM_MC_ELSE() {
5061 IEM_MC_ADVANCE_RIP();
5062 } IEM_MC_ENDIF();
5063 IEM_MC_END();
5064 }
5065 else
5066 {
5067 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5069
5070 IEM_MC_BEGIN(0, 0);
5071 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5072 IEM_MC_REL_JMP_S32(i32Imm);
5073 } IEM_MC_ELSE() {
5074 IEM_MC_ADVANCE_RIP();
5075 } IEM_MC_ENDIF();
5076 IEM_MC_END();
5077 }
5078 return VINF_SUCCESS;
5079}
5080
5081
5082/** Opcode 0x0f 0x8f. */
5083FNIEMOP_DEF(iemOp_jnle_Jv)
5084{
5085 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5086 IEMOP_HLP_MIN_386();
5087 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5088 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5089 {
5090 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5092
5093 IEM_MC_BEGIN(0, 0);
5094 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5095 IEM_MC_ADVANCE_RIP();
5096 } IEM_MC_ELSE() {
5097 IEM_MC_REL_JMP_S16(i16Imm);
5098 } IEM_MC_ENDIF();
5099 IEM_MC_END();
5100 }
5101 else
5102 {
5103 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5105
5106 IEM_MC_BEGIN(0, 0);
5107 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5108 IEM_MC_ADVANCE_RIP();
5109 } IEM_MC_ELSE() {
5110 IEM_MC_REL_JMP_S32(i32Imm);
5111 } IEM_MC_ENDIF();
5112 IEM_MC_END();
5113 }
5114 return VINF_SUCCESS;
5115}
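/*
 * Editor's note: the 0x80..0x8f forms above all share one shape: fetch a
 * signed 16- or 32-bit displacement, then take IEM_MC_REL_JMP_S16/S32 or
 * IEM_MC_ADVANCE_RIP depending on an EFLAGS predicate.  A sketch of the two
 * signed-compare predicates (jl/jge and jle/jg), using the architectural bit
 * positions ZF=6, SF=7 and OF=11; the helper names are hypothetical.
 */
#include <stdbool.h>
#include <stdint.h>

static bool iemSketchCondL(uint32_t fEfl)  /* jl/jnge taken when SF != OF */
{
    return ((fEfl >> 7) & 1) != ((fEfl >> 11) & 1);
}

static bool iemSketchCondLE(uint32_t fEfl) /* jle/jng taken when ZF or SF != OF */
{
    return ((fEfl >> 6) & 1) || iemSketchCondL(fEfl);
}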
5116
5117
5118/** Opcode 0x0f 0x90. */
5119FNIEMOP_DEF(iemOp_seto_Eb)
5120{
5121 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5122 IEMOP_HLP_MIN_386();
5123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5124
5125 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5126 * any way. AMD says it's "unused", whatever that means. We're
5127 * ignoring for now. */
5128 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5129 {
5130 /* register target */
5131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5132 IEM_MC_BEGIN(0, 0);
5133 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5134 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5135 } IEM_MC_ELSE() {
5136 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5137 } IEM_MC_ENDIF();
5138 IEM_MC_ADVANCE_RIP();
5139 IEM_MC_END();
5140 }
5141 else
5142 {
5143 /* memory target */
5144 IEM_MC_BEGIN(0, 1);
5145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5148 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5149 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5150 } IEM_MC_ELSE() {
5151 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5152 } IEM_MC_ENDIF();
5153 IEM_MC_ADVANCE_RIP();
5154 IEM_MC_END();
5155 }
5156 return VINF_SUCCESS;
5157}
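/*
 * Editor's note: the setcc family starting above repeats one pattern per
 * condition: evaluate an EFLAGS predicate and store exactly 0x01 or 0x00 to
 * the byte-sized r/m target, for register and memory destinations alike.  A
 * trivial sketch of that store value, with the predicate abstracted away:
 */
#include <stdint.h>

static uint8_t iemSketchSetccValue(int fCondition)
{
    return fCondition ? 1 : 0; /* setcc never writes anything but 0 or 1 */
}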
5158
5159
5160/** Opcode 0x0f 0x91. */
5161FNIEMOP_DEF(iemOp_setno_Eb)
5162{
5163 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5164 IEMOP_HLP_MIN_386();
5165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5166
5167 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5168 * any way. AMD says it's "unused", whatever that means. We're
5169 * ignoring for now. */
5170 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5171 {
5172 /* register target */
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_BEGIN(0, 0);
5175 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5176 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5177 } IEM_MC_ELSE() {
5178 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5179 } IEM_MC_ENDIF();
5180 IEM_MC_ADVANCE_RIP();
5181 IEM_MC_END();
5182 }
5183 else
5184 {
5185 /* memory target */
5186 IEM_MC_BEGIN(0, 1);
5187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5190 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5191 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5192 } IEM_MC_ELSE() {
5193 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5194 } IEM_MC_ENDIF();
5195 IEM_MC_ADVANCE_RIP();
5196 IEM_MC_END();
5197 }
5198 return VINF_SUCCESS;
5199}
5200
5201
5202/** Opcode 0x0f 0x92. */
5203FNIEMOP_DEF(iemOp_setc_Eb)
5204{
5205 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5206 IEMOP_HLP_MIN_386();
5207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5208
5209 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5210 * any way. AMD says it's "unused", whatever that means. We're
5211 * ignoring for now. */
5212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5213 {
5214 /* register target */
5215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5216 IEM_MC_BEGIN(0, 0);
5217 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5218 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5219 } IEM_MC_ELSE() {
5220 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5221 } IEM_MC_ENDIF();
5222 IEM_MC_ADVANCE_RIP();
5223 IEM_MC_END();
5224 }
5225 else
5226 {
5227 /* memory target */
5228 IEM_MC_BEGIN(0, 1);
5229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5232 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5233 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5234 } IEM_MC_ELSE() {
5235 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5236 } IEM_MC_ENDIF();
5237 IEM_MC_ADVANCE_RIP();
5238 IEM_MC_END();
5239 }
5240 return VINF_SUCCESS;
5241}
5242
5243
5244/** Opcode 0x0f 0x93. */
5245FNIEMOP_DEF(iemOp_setnc_Eb)
5246{
5247 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5248 IEMOP_HLP_MIN_386();
5249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5250
5251 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5252 * any way. AMD says it's "unused", whatever that means. We're
5253 * ignoring for now. */
5254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5255 {
5256 /* register target */
5257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5258 IEM_MC_BEGIN(0, 0);
5259 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5260 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5261 } IEM_MC_ELSE() {
5262 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5263 } IEM_MC_ENDIF();
5264 IEM_MC_ADVANCE_RIP();
5265 IEM_MC_END();
5266 }
5267 else
5268 {
5269 /* memory target */
5270 IEM_MC_BEGIN(0, 1);
5271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5274 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5275 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5276 } IEM_MC_ELSE() {
5277 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5278 } IEM_MC_ENDIF();
5279 IEM_MC_ADVANCE_RIP();
5280 IEM_MC_END();
5281 }
5282 return VINF_SUCCESS;
5283}
5284
5285
5286/** Opcode 0x0f 0x94. */
5287FNIEMOP_DEF(iemOp_sete_Eb)
5288{
5289 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5290 IEMOP_HLP_MIN_386();
5291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5292
5293 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5294 * any way. AMD says it's "unused", whatever that means. We're
5295 * ignoring for now. */
5296 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5297 {
5298 /* register target */
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300 IEM_MC_BEGIN(0, 0);
5301 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5302 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5303 } IEM_MC_ELSE() {
5304 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5305 } IEM_MC_ENDIF();
5306 IEM_MC_ADVANCE_RIP();
5307 IEM_MC_END();
5308 }
5309 else
5310 {
5311 /* memory target */
5312 IEM_MC_BEGIN(0, 1);
5313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5316 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5317 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5318 } IEM_MC_ELSE() {
5319 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5320 } IEM_MC_ENDIF();
5321 IEM_MC_ADVANCE_RIP();
5322 IEM_MC_END();
5323 }
5324 return VINF_SUCCESS;
5325}
5326
5327
5328/** Opcode 0x0f 0x95. */
5329FNIEMOP_DEF(iemOp_setne_Eb)
5330{
5331 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5332 IEMOP_HLP_MIN_386();
5333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5334
5335 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5336 * any way. AMD says it's "unused", whatever that means. We're
5337 * ignoring for now. */
5338 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5339 {
5340 /* register target */
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5342 IEM_MC_BEGIN(0, 0);
5343 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5344 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5345 } IEM_MC_ELSE() {
5346 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5347 } IEM_MC_ENDIF();
5348 IEM_MC_ADVANCE_RIP();
5349 IEM_MC_END();
5350 }
5351 else
5352 {
5353 /* memory target */
5354 IEM_MC_BEGIN(0, 1);
5355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5358 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5359 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5360 } IEM_MC_ELSE() {
5361 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5362 } IEM_MC_ENDIF();
5363 IEM_MC_ADVANCE_RIP();
5364 IEM_MC_END();
5365 }
5366 return VINF_SUCCESS;
5367}
5368
5369
5370/** Opcode 0x0f 0x96. */
5371FNIEMOP_DEF(iemOp_setbe_Eb)
5372{
5373 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5374 IEMOP_HLP_MIN_386();
5375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5376
5377 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5378 * any way. AMD says it's "unused", whatever that means. We're
5379 * ignoring for now. */
5380 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5381 {
5382 /* register target */
5383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5384 IEM_MC_BEGIN(0, 0);
5385 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5386 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5387 } IEM_MC_ELSE() {
5388 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5389 } IEM_MC_ENDIF();
5390 IEM_MC_ADVANCE_RIP();
5391 IEM_MC_END();
5392 }
5393 else
5394 {
5395 /* memory target */
5396 IEM_MC_BEGIN(0, 1);
5397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5400 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5401 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5402 } IEM_MC_ELSE() {
5403 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5404 } IEM_MC_ENDIF();
5405 IEM_MC_ADVANCE_RIP();
5406 IEM_MC_END();
5407 }
5408 return VINF_SUCCESS;
5409}
5410
5411
5412/** Opcode 0x0f 0x97. */
5413FNIEMOP_DEF(iemOp_setnbe_Eb)
5414{
5415 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5416 IEMOP_HLP_MIN_386();
5417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5418
5419 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5420 * any way. AMD says it's "unused", whatever that means. We're
5421 * ignoring for now. */
5422 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5423 {
5424 /* register target */
5425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5426 IEM_MC_BEGIN(0, 0);
5427 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5428 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5429 } IEM_MC_ELSE() {
5430 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5431 } IEM_MC_ENDIF();
5432 IEM_MC_ADVANCE_RIP();
5433 IEM_MC_END();
5434 }
5435 else
5436 {
5437 /* memory target */
5438 IEM_MC_BEGIN(0, 1);
5439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5442 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5443 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5444 } IEM_MC_ELSE() {
5445 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5446 } IEM_MC_ENDIF();
5447 IEM_MC_ADVANCE_RIP();
5448 IEM_MC_END();
5449 }
5450 return VINF_SUCCESS;
5451}
5452
5453
5454/** Opcode 0x0f 0x98. */
5455FNIEMOP_DEF(iemOp_sets_Eb)
5456{
5457 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5458 IEMOP_HLP_MIN_386();
5459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5460
5461 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5462 * any way. AMD says it's "unused", whatever that means. We're
5463 * ignoring for now. */
5464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5465 {
5466 /* register target */
5467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5468 IEM_MC_BEGIN(0, 0);
5469 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5470 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5471 } IEM_MC_ELSE() {
5472 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5473 } IEM_MC_ENDIF();
5474 IEM_MC_ADVANCE_RIP();
5475 IEM_MC_END();
5476 }
5477 else
5478 {
5479 /* memory target */
5480 IEM_MC_BEGIN(0, 1);
5481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5484 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5485 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5486 } IEM_MC_ELSE() {
5487 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5488 } IEM_MC_ENDIF();
5489 IEM_MC_ADVANCE_RIP();
5490 IEM_MC_END();
5491 }
5492 return VINF_SUCCESS;
5493}
5494
5495
5496/** Opcode 0x0f 0x99. */
5497FNIEMOP_DEF(iemOp_setns_Eb)
5498{
5499 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5500 IEMOP_HLP_MIN_386();
5501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5502
5503 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5504 * any way. AMD says it's "unused", whatever that means. We're
5505 * ignoring it for now. */
5506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5507 {
5508 /* register target */
5509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5510 IEM_MC_BEGIN(0, 0);
5511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5512 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5513 } IEM_MC_ELSE() {
5514 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5515 } IEM_MC_ENDIF();
5516 IEM_MC_ADVANCE_RIP();
5517 IEM_MC_END();
5518 }
5519 else
5520 {
5521 /* memory target */
5522 IEM_MC_BEGIN(0, 1);
5523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5527 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5528 } IEM_MC_ELSE() {
5529 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5530 } IEM_MC_ENDIF();
5531 IEM_MC_ADVANCE_RIP();
5532 IEM_MC_END();
5533 }
5534 return VINF_SUCCESS;
5535}
5536
5537
5538/** Opcode 0x0f 0x9a. */
5539FNIEMOP_DEF(iemOp_setp_Eb)
5540{
5541 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5542 IEMOP_HLP_MIN_386();
5543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5544
5545 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5546 * any way. AMD says it's "unused", whatever that means. We're
5547 * ignoring it for now. */
5548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5549 {
5550 /* register target */
5551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5552 IEM_MC_BEGIN(0, 0);
5553 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5554 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5555 } IEM_MC_ELSE() {
5556 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5557 } IEM_MC_ENDIF();
5558 IEM_MC_ADVANCE_RIP();
5559 IEM_MC_END();
5560 }
5561 else
5562 {
5563 /* memory target */
5564 IEM_MC_BEGIN(0, 1);
5565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5568 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5569 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5570 } IEM_MC_ELSE() {
5571 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5572 } IEM_MC_ENDIF();
5573 IEM_MC_ADVANCE_RIP();
5574 IEM_MC_END();
5575 }
5576 return VINF_SUCCESS;
5577}
5578
5579
5580/** Opcode 0x0f 0x9b. */
5581FNIEMOP_DEF(iemOp_setnp_Eb)
5582{
5583 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5584 IEMOP_HLP_MIN_386();
5585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5586
5587 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5588 * any way. AMD says it's "unused", whatever that means. We're
5589 * ignoring it for now. */
5590 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5591 {
5592 /* register target */
5593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5594 IEM_MC_BEGIN(0, 0);
5595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5596 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5597 } IEM_MC_ELSE() {
5598 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5599 } IEM_MC_ENDIF();
5600 IEM_MC_ADVANCE_RIP();
5601 IEM_MC_END();
5602 }
5603 else
5604 {
5605 /* memory target */
5606 IEM_MC_BEGIN(0, 1);
5607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5610 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5611 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5612 } IEM_MC_ELSE() {
5613 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5614 } IEM_MC_ENDIF();
5615 IEM_MC_ADVANCE_RIP();
5616 IEM_MC_END();
5617 }
5618 return VINF_SUCCESS;
5619}
5620
5621
5622/** Opcode 0x0f 0x9c. */
5623FNIEMOP_DEF(iemOp_setl_Eb)
5624{
5625 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5626 IEMOP_HLP_MIN_386();
5627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5628
5629 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5630 * any way. AMD says it's "unused", whatever that means. We're
5631 * ignoring it for now. */
5632 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5633 {
5634 /* register target */
5635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5636 IEM_MC_BEGIN(0, 0);
5637 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5638 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5639 } IEM_MC_ELSE() {
5640 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5641 } IEM_MC_ENDIF();
5642 IEM_MC_ADVANCE_RIP();
5643 IEM_MC_END();
5644 }
5645 else
5646 {
5647 /* memory target */
5648 IEM_MC_BEGIN(0, 1);
5649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5653 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5654 } IEM_MC_ELSE() {
5655 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5656 } IEM_MC_ENDIF();
5657 IEM_MC_ADVANCE_RIP();
5658 IEM_MC_END();
5659 }
5660 return VINF_SUCCESS;
5661}
5662
5663
5664/** Opcode 0x0f 0x9d. */
5665FNIEMOP_DEF(iemOp_setnl_Eb)
5666{
5667 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5668 IEMOP_HLP_MIN_386();
5669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5670
5671 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5672 * any way. AMD says it's "unused", whatever that means. We're
5673 * ignoring it for now. */
5674 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5675 {
5676 /* register target */
5677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5678 IEM_MC_BEGIN(0, 0);
5679 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5680 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5681 } IEM_MC_ELSE() {
5682 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5683 } IEM_MC_ENDIF();
5684 IEM_MC_ADVANCE_RIP();
5685 IEM_MC_END();
5686 }
5687 else
5688 {
5689 /* memory target */
5690 IEM_MC_BEGIN(0, 1);
5691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5694 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5695 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5696 } IEM_MC_ELSE() {
5697 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5698 } IEM_MC_ENDIF();
5699 IEM_MC_ADVANCE_RIP();
5700 IEM_MC_END();
5701 }
5702 return VINF_SUCCESS;
5703}
5704
5705
5706/** Opcode 0x0f 0x9e. */
5707FNIEMOP_DEF(iemOp_setle_Eb)
5708{
5709 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5710 IEMOP_HLP_MIN_386();
5711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5712
5713 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5714 * any way. AMD says it's "unused", whatever that means. We're
5715 * ignoring it for now. */
5716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5717 {
5718 /* register target */
5719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5720 IEM_MC_BEGIN(0, 0);
5721 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5722 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5723 } IEM_MC_ELSE() {
5724 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5725 } IEM_MC_ENDIF();
5726 IEM_MC_ADVANCE_RIP();
5727 IEM_MC_END();
5728 }
5729 else
5730 {
5731 /* memory target */
5732 IEM_MC_BEGIN(0, 1);
5733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5736 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5737 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5738 } IEM_MC_ELSE() {
5739 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5740 } IEM_MC_ENDIF();
5741 IEM_MC_ADVANCE_RIP();
5742 IEM_MC_END();
5743 }
5744 return VINF_SUCCESS;
5745}
5746
5747
5748/** Opcode 0x0f 0x9f. */
5749FNIEMOP_DEF(iemOp_setnle_Eb)
5750{
5751 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5752 IEMOP_HLP_MIN_386();
5753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5754
5755 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5756 * any way. AMD says it's "unused", whatever that means. We're
5757 * ignoring it for now. */
5758 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5759 {
5760 /* register target */
5761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5762 IEM_MC_BEGIN(0, 0);
5763 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5764 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5765 } IEM_MC_ELSE() {
5766 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5767 } IEM_MC_ENDIF();
5768 IEM_MC_ADVANCE_RIP();
5769 IEM_MC_END();
5770 }
5771 else
5772 {
5773 /* memory target */
5774 IEM_MC_BEGIN(0, 1);
5775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5778 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5779 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5780 } IEM_MC_ELSE() {
5781 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5782 } IEM_MC_ENDIF();
5783 IEM_MC_ADVANCE_RIP();
5784 IEM_MC_END();
5785 }
5786 return VINF_SUCCESS;
5787}
5788
5789
5790/**
5791 * Common 'push segment-register' helper.
5792 */
5793FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5794{
5795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5796 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5797 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5798
5799 switch (pVCpu->iem.s.enmEffOpSize)
5800 {
5801 case IEMMODE_16BIT:
5802 IEM_MC_BEGIN(0, 1);
5803 IEM_MC_LOCAL(uint16_t, u16Value);
5804 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5805 IEM_MC_PUSH_U16(u16Value);
5806 IEM_MC_ADVANCE_RIP();
5807 IEM_MC_END();
5808 break;
5809
5810 case IEMMODE_32BIT:
5811 IEM_MC_BEGIN(0, 1);
5812 IEM_MC_LOCAL(uint32_t, u32Value);
5813 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5814 IEM_MC_PUSH_U32_SREG(u32Value);
5815 IEM_MC_ADVANCE_RIP();
5816 IEM_MC_END();
5817 break;
5818
5819 case IEMMODE_64BIT:
5820 IEM_MC_BEGIN(0, 1);
5821 IEM_MC_LOCAL(uint64_t, u64Value);
5822 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5823 IEM_MC_PUSH_U64(u64Value);
5824 IEM_MC_ADVANCE_RIP();
5825 IEM_MC_END();
5826 break;
5827 }
5828
5829 return VINF_SUCCESS;
5830}
5831
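/*
 * Why the 32-bit case above uses IEM_MC_PUSH_U32_SREG instead of a plain
 * IEM_MC_PUSH_U32: per the PUSH notes in the SDM, many CPUs pushing a
 * segment register with a 32-bit operand size write only the low 16 bits
 * of the stack slot and leave the high half untouched.  A sketch of that
 * behaviour under the little-endian x86 layout (not compiled;
 * iemSketchPushSReg32 is hypothetical):
 */
#if 0
static void iemSketchPushSReg32(uint16_t uSel, uint32_t *puStackSlot)
{
    *(uint16_t *)puStackSlot = uSel; /* 16-bit store; bytes 2..3 keep their old value */
}
#endif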
5832
5833/** Opcode 0x0f 0xa0. */
5834FNIEMOP_DEF(iemOp_push_fs)
5835{
5836 IEMOP_MNEMONIC(push_fs, "push fs");
5837 IEMOP_HLP_MIN_386();
5838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5839 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5840}
5841
5842
5843/** Opcode 0x0f 0xa1. */
5844FNIEMOP_DEF(iemOp_pop_fs)
5845{
5846 IEMOP_MNEMONIC(pop_fs, "pop fs");
5847 IEMOP_HLP_MIN_386();
5848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5849 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5850}
5851
5852
5853/** Opcode 0x0f 0xa2. */
5854FNIEMOP_DEF(iemOp_cpuid)
5855{
5856 IEMOP_MNEMONIC(cpuid, "cpuid");
5857 IEMOP_HLP_MIN_486(); /* Not on all 486s; CPUID was only added in later steppings. */
5858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5859 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5860}
5861
5862
5863/**
5864 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5865 * iemOp_bts_Ev_Gv.
5866 */
5867FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5868{
5869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5870 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5871
5872 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5873 {
5874 /* register destination. */
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5876 switch (pVCpu->iem.s.enmEffOpSize)
5877 {
5878 case IEMMODE_16BIT:
5879 IEM_MC_BEGIN(3, 0);
5880 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5881 IEM_MC_ARG(uint16_t, u16Src, 1);
5882 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5883
5884 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5885 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5886 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5887 IEM_MC_REF_EFLAGS(pEFlags);
5888 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5889
5890 IEM_MC_ADVANCE_RIP();
5891 IEM_MC_END();
5892 return VINF_SUCCESS;
5893
5894 case IEMMODE_32BIT:
5895 IEM_MC_BEGIN(3, 0);
5896 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5897 IEM_MC_ARG(uint32_t, u32Src, 1);
5898 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5899
5900 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5901 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5902 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5903 IEM_MC_REF_EFLAGS(pEFlags);
5904 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5905
5906 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5907 IEM_MC_ADVANCE_RIP();
5908 IEM_MC_END();
5909 return VINF_SUCCESS;
5910
5911 case IEMMODE_64BIT:
5912 IEM_MC_BEGIN(3, 0);
5913 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5914 IEM_MC_ARG(uint64_t, u64Src, 1);
5915 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5916
5917 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5918 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5919 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5920 IEM_MC_REF_EFLAGS(pEFlags);
5921 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5922
5923 IEM_MC_ADVANCE_RIP();
5924 IEM_MC_END();
5925 return VINF_SUCCESS;
5926
5927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5928 }
5929 }
5930 else
5931 {
5932 /* memory destination. */
5933
5934 uint32_t fAccess;
5935 if (pImpl->pfnLockedU16)
5936 fAccess = IEM_ACCESS_DATA_RW;
5937 else /* BT */
5938 fAccess = IEM_ACCESS_DATA_R;
5939
5940 /** @todo test negative bit offsets! */
5941 switch (pVCpu->iem.s.enmEffOpSize)
5942 {
5943 case IEMMODE_16BIT:
5944 IEM_MC_BEGIN(3, 2);
5945 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5946 IEM_MC_ARG(uint16_t, u16Src, 1);
5947 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5949 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5950
5951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5952 if (pImpl->pfnLockedU16)
5953 IEMOP_HLP_DONE_DECODING();
5954 else
5955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5956 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5957 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5958 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5959 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5960 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5961 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5962 IEM_MC_FETCH_EFLAGS(EFlags);
5963
5964 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5965 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5966 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5967 else
5968 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5969 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5970
5971 IEM_MC_COMMIT_EFLAGS(EFlags);
5972 IEM_MC_ADVANCE_RIP();
5973 IEM_MC_END();
5974 return VINF_SUCCESS;
5975
5976 case IEMMODE_32BIT:
5977 IEM_MC_BEGIN(3, 2);
5978 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5979 IEM_MC_ARG(uint32_t, u32Src, 1);
5980 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5982 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5983
5984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5985 if (pImpl->pfnLockedU16)
5986 IEMOP_HLP_DONE_DECODING();
5987 else
5988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5989 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5990 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5991 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5992 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5993 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5994 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5995 IEM_MC_FETCH_EFLAGS(EFlags);
5996
5997 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5998 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5999 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6000 else
6001 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6002 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6003
6004 IEM_MC_COMMIT_EFLAGS(EFlags);
6005 IEM_MC_ADVANCE_RIP();
6006 IEM_MC_END();
6007 return VINF_SUCCESS;
6008
6009 case IEMMODE_64BIT:
6010 IEM_MC_BEGIN(3, 2);
6011 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6012 IEM_MC_ARG(uint64_t, u64Src, 1);
6013 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6015 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6016
6017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6018 if (pImpl->pfnLockedU16)
6019 IEMOP_HLP_DONE_DECODING();
6020 else
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6023 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6024 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6025 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6026 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6027 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6028 IEM_MC_FETCH_EFLAGS(EFlags);
6029
6030 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6031 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6032 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6033 else
6034 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6035 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6036
6037 IEM_MC_COMMIT_EFLAGS(EFlags);
6038 IEM_MC_ADVANCE_RIP();
6039 IEM_MC_END();
6040 return VINF_SUCCESS;
6041
6042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6043 }
6044 }
6045}
6046
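/*
 * The i16AddrAdj/i32AddrAdj/i64AddrAdj dance above implements the memory
 * forms of BT/BTS/BTR/BTC, where the register bit offset is a *signed*
 * value that may select a bit outside the addressed word: the effective
 * address is displaced by (offset >> log2(width)) words and only the low
 * log2(width) bits remain as the in-word bit number.  A sketch of the
 * 16-bit read case, assuming >> on a negative value is an arithmetic
 * shift (which is what IEM_MC_SAR_LOCAL_S16 guarantees; not compiled):
 */
#if 0
static int iemSketchBtMem16(uint8_t const *pbBase, int16_t iBitOfs)
{
    uint16_t const *pu16 = (uint16_t const *)(pbBase + (int32_t)(iBitOfs >> 4) * 2);
    return (*pu16 >> (iBitOfs & 15)) & 1;   /* this becomes CF */
}
#endif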
6047
6048/** Opcode 0x0f 0xa3. */
6049FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6050{
6051 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6052 IEMOP_HLP_MIN_386();
6053 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6054}
6055
6056
6057/**
6058 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6059 */
6060FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6061{
6062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6063 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6064
6065 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6066 {
6067 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6069
6070 switch (pVCpu->iem.s.enmEffOpSize)
6071 {
6072 case IEMMODE_16BIT:
6073 IEM_MC_BEGIN(4, 0);
6074 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6075 IEM_MC_ARG(uint16_t, u16Src, 1);
6076 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6077 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6078
6079 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6080 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6081 IEM_MC_REF_EFLAGS(pEFlags);
6082 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6083
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 return VINF_SUCCESS;
6087
6088 case IEMMODE_32BIT:
6089 IEM_MC_BEGIN(4, 0);
6090 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6091 IEM_MC_ARG(uint32_t, u32Src, 1);
6092 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6093 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6094
6095 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6096 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6097 IEM_MC_REF_EFLAGS(pEFlags);
6098 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6099
6100 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6101 IEM_MC_ADVANCE_RIP();
6102 IEM_MC_END();
6103 return VINF_SUCCESS;
6104
6105 case IEMMODE_64BIT:
6106 IEM_MC_BEGIN(4, 0);
6107 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6108 IEM_MC_ARG(uint64_t, u64Src, 1);
6109 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6110 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6111
6112 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6113 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6114 IEM_MC_REF_EFLAGS(pEFlags);
6115 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6116
6117 IEM_MC_ADVANCE_RIP();
6118 IEM_MC_END();
6119 return VINF_SUCCESS;
6120
6121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6122 }
6123 }
6124 else
6125 {
6126 switch (pVCpu->iem.s.enmEffOpSize)
6127 {
6128 case IEMMODE_16BIT:
6129 IEM_MC_BEGIN(4, 2);
6130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6131 IEM_MC_ARG(uint16_t, u16Src, 1);
6132 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6133 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6135
6136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6137 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6138 IEM_MC_ASSIGN(cShiftArg, cShift);
6139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6140 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6141 IEM_MC_FETCH_EFLAGS(EFlags);
6142 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6143 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6144
6145 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6146 IEM_MC_COMMIT_EFLAGS(EFlags);
6147 IEM_MC_ADVANCE_RIP();
6148 IEM_MC_END();
6149 return VINF_SUCCESS;
6150
6151 case IEMMODE_32BIT:
6152 IEM_MC_BEGIN(4, 2);
6153 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6154 IEM_MC_ARG(uint32_t, u32Src, 1);
6155 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6156 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6158
6159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6160 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6161 IEM_MC_ASSIGN(cShiftArg, cShift);
6162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6163 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6164 IEM_MC_FETCH_EFLAGS(EFlags);
6165 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6166 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6167
6168 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6169 IEM_MC_COMMIT_EFLAGS(EFlags);
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_64BIT:
6175 IEM_MC_BEGIN(4, 2);
6176 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6177 IEM_MC_ARG(uint64_t, u64Src, 1);
6178 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6179 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6181
6182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6183 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6184 IEM_MC_ASSIGN(cShiftArg, cShift);
6185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6186 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6187 IEM_MC_FETCH_EFLAGS(EFlags);
6188 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6189 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6190
6191 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6192 IEM_MC_COMMIT_EFLAGS(EFlags);
6193 IEM_MC_ADVANCE_RIP();
6194 IEM_MC_END();
6195 return VINF_SUCCESS;
6196
6197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6198 }
6199 }
6200}
6201
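/*
 * SHLD shifts the destination left and feeds the vacated low-order bits
 * from the top of the source; SHRD mirrors this on the right.  The count
 * is taken modulo 32 (modulo 64 for 64-bit operands) and the result is
 * undefined when it exceeds the operand width.  A 32-bit sketch, kept out
 * of the build:
 */
#if 0
static uint32_t iemSketchShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;
    if (!cShift)
        return uDst;                        /* avoid the undefined shift by 32 */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif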
6202
6203/**
6204 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6205 */
6206FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6207{
6208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6209 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6210
6211 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6212 {
6213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6214
6215 switch (pVCpu->iem.s.enmEffOpSize)
6216 {
6217 case IEMMODE_16BIT:
6218 IEM_MC_BEGIN(4, 0);
6219 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6220 IEM_MC_ARG(uint16_t, u16Src, 1);
6221 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6222 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6223
6224 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6225 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6226 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6227 IEM_MC_REF_EFLAGS(pEFlags);
6228 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6229
6230 IEM_MC_ADVANCE_RIP();
6231 IEM_MC_END();
6232 return VINF_SUCCESS;
6233
6234 case IEMMODE_32BIT:
6235 IEM_MC_BEGIN(4, 0);
6236 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6237 IEM_MC_ARG(uint32_t, u32Src, 1);
6238 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6239 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6240
6241 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6242 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6243 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6244 IEM_MC_REF_EFLAGS(pEFlags);
6245 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6246
6247 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6248 IEM_MC_ADVANCE_RIP();
6249 IEM_MC_END();
6250 return VINF_SUCCESS;
6251
6252 case IEMMODE_64BIT:
6253 IEM_MC_BEGIN(4, 0);
6254 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6255 IEM_MC_ARG(uint64_t, u64Src, 1);
6256 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6257 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6258
6259 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6260 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6261 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6262 IEM_MC_REF_EFLAGS(pEFlags);
6263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6264
6265 IEM_MC_ADVANCE_RIP();
6266 IEM_MC_END();
6267 return VINF_SUCCESS;
6268
6269 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6270 }
6271 }
6272 else
6273 {
6274 switch (pVCpu->iem.s.enmEffOpSize)
6275 {
6276 case IEMMODE_16BIT:
6277 IEM_MC_BEGIN(4, 2);
6278 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6279 IEM_MC_ARG(uint16_t, u16Src, 1);
6280 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6283
6284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6286 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6287 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6288 IEM_MC_FETCH_EFLAGS(EFlags);
6289 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6290 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6291
6292 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6293 IEM_MC_COMMIT_EFLAGS(EFlags);
6294 IEM_MC_ADVANCE_RIP();
6295 IEM_MC_END();
6296 return VINF_SUCCESS;
6297
6298 case IEMMODE_32BIT:
6299 IEM_MC_BEGIN(4, 2);
6300 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6301 IEM_MC_ARG(uint32_t, u32Src, 1);
6302 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6305
6306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6308 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6309 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6310 IEM_MC_FETCH_EFLAGS(EFlags);
6311 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6312 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6313
6314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6315 IEM_MC_COMMIT_EFLAGS(EFlags);
6316 IEM_MC_ADVANCE_RIP();
6317 IEM_MC_END();
6318 return VINF_SUCCESS;
6319
6320 case IEMMODE_64BIT:
6321 IEM_MC_BEGIN(4, 2);
6322 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6323 IEM_MC_ARG(uint64_t, u64Src, 1);
6324 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6325 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6327
6328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6330 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6331 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6332 IEM_MC_FETCH_EFLAGS(EFlags);
6333 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6334 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6335
6336 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6337 IEM_MC_COMMIT_EFLAGS(EFlags);
6338 IEM_MC_ADVANCE_RIP();
6339 IEM_MC_END();
6340 return VINF_SUCCESS;
6341
6342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6343 }
6344 }
6345}
6346
6347
6348
6349/** Opcode 0x0f 0xa4. */
6350FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6351{
6352 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6353 IEMOP_HLP_MIN_386();
6354 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6355}
6356
6357
6358/** Opcode 0x0f 0xa5. */
6359FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6360{
6361 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6362 IEMOP_HLP_MIN_386();
6363 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6364}
6365
6366
6367/** Opcode 0x0f 0xa8. */
6368FNIEMOP_DEF(iemOp_push_gs)
6369{
6370 IEMOP_MNEMONIC(push_gs, "push gs");
6371 IEMOP_HLP_MIN_386();
6372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6373 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6374}
6375
6376
6377/** Opcode 0x0f 0xa9. */
6378FNIEMOP_DEF(iemOp_pop_gs)
6379{
6380 IEMOP_MNEMONIC(pop_gs, "pop gs");
6381 IEMOP_HLP_MIN_386();
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6383 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6384}
6385
6386
6387/** Opcode 0x0f 0xaa. */
6388FNIEMOP_DEF(iemOp_rsm)
6389{
6390 IEMOP_MNEMONIC(rsm, "rsm");
6391 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6392 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6393 * intercept). */
6394 IEMOP_BITCH_ABOUT_STUB();
6395 return IEMOP_RAISE_INVALID_OPCODE();
6396}
6397
6398//IEMOP_HLP_MIN_386();
6399
6400
6401/** Opcode 0x0f 0xab. */
6402FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6403{
6404 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6405 IEMOP_HLP_MIN_386();
6406 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6407}
6408
6409
6410/** Opcode 0x0f 0xac. */
6411FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6412{
6413 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6414 IEMOP_HLP_MIN_386();
6415 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6416}
6417
6418
6419/** Opcode 0x0f 0xad. */
6420FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6421{
6422 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6423 IEMOP_HLP_MIN_386();
6424 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6425}
6426
6427
6428/** Opcode 0x0f 0xae mem/0. */
6429FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6430{
6431 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6432 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6433 return IEMOP_RAISE_INVALID_OPCODE();
6434
6435 IEM_MC_BEGIN(3, 1);
6436 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6437 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6438 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6441 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6442 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6443 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6444 IEM_MC_END();
6445 return VINF_SUCCESS;
6446}
6447
6448
6449/** Opcode 0x0f 0xae mem/1. */
6450FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6451{
6452 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6453 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6454 return IEMOP_RAISE_INVALID_OPCODE();
6455
6456 IEM_MC_BEGIN(3, 1);
6457 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6458 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6459 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6463 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6464 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6465 IEM_MC_END();
6466 return VINF_SUCCESS;
6467}
6468
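/*
 * Note on the m512 operand: FXSAVE and FXRSTOR operate on a 512-byte area
 * that must be 16-byte aligned, with a misaligned effective address
 * raising #GP.  That check (and the CR0.TS/CR0.EM exception logic) is
 * presumably performed inside the iemCImpl_fxsave/iemCImpl_fxrstor
 * workers rather than in the decoders above.
 */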
6469
6470/**
6471 * @opmaps grp15
6472 * @opcode !11/2
6473 * @oppfx none
6474 * @opcpuid sse
6475 * @opgroup og_sse_mxcsrsm
6476 * @opxcpttype 5
6477 * @optest op1=0 -> mxcsr=0
6478 * @optest op1=0x2083 -> mxcsr=0x2083
6479 * @optest op1=0xfffffffe -> value.xcpt=0xd
6480 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6481 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6482 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6483 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6484 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6485 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6486 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6487 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6488 */
6489FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6490{
6491 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6492 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6493 return IEMOP_RAISE_INVALID_OPCODE();
6494
6495 IEM_MC_BEGIN(2, 0);
6496 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6497 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6500 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6501 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6502 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6503 IEM_MC_END();
6504 return VINF_SUCCESS;
6505}
6506
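/*
 * The op1=0xfffffffe -> value.xcpt=0xd test above is the reserved-bit
 * check: loading MXCSR bits outside the MXCSR_MASK the CPU reports via
 * FXSAVE raises #GP(0).  A sketch of that check with a hypothetical
 * fMxCsrMask parameter (not compiled):
 */
#if 0
static int iemSketchLdMxcsr(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
{
    if (uNewMxCsr & ~fMxCsrMask)
        return -1;              /* -> #GP(0) */
    return 0;                   /* commit uNewMxCsr */
}
#endif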
6507
6508/**
6509 * @opmaps grp15
6510 * @opcode !11/3
6511 * @oppfx none
6512 * @opcpuid sse
6513 * @opgroup og_sse_mxcsrsm
6514 * @opxcpttype 5
6515 * @optest mxcsr=0 -> op1=0
6516 * @optest mxcsr=0x2083 -> op1=0x2083
6517 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6518 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6519 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6520 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6521 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6522 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6523 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6524 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6525 */
6526FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6527{
6528 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6529 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6530 return IEMOP_RAISE_INVALID_OPCODE();
6531
6532 IEM_MC_BEGIN(2, 0);
6533 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6534 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6538 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6539 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6540 IEM_MC_END();
6541 return VINF_SUCCESS;
6542}
6543
6544
6545/**
6546 * @opmaps grp15
6547 * @opcode !11/4
6548 * @oppfx none
6549 * @opcpuid xsave
6550 * @opgroup og_system
6551 * @opxcpttype none
6552 */
6553FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6554{
6555 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6556 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6557 return IEMOP_RAISE_INVALID_OPCODE();
6558
6559 IEM_MC_BEGIN(3, 0);
6560 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6561 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6562 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6565 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6566 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6567 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6568 IEM_MC_END();
6569 return VINF_SUCCESS;
6570}
6571
6572
6573/**
6574 * @opmaps grp15
6575 * @opcode !11/5
6576 * @oppfx none
6577 * @opcpuid xsave
6578 * @opgroup og_system
6579 * @opxcpttype none
6580 */
6581FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6582{
6583 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6584 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6585 return IEMOP_RAISE_INVALID_OPCODE();
6586
6587 IEM_MC_BEGIN(3, 0);
6588 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6589 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6590 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6593 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6594 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6595 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6596 IEM_MC_END();
6597 return VINF_SUCCESS;
6598}
6599
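/*
 * Note: XSAVE/XRSTOR use EDX:EAX ANDed with XCR0 as the requested-feature
 * bitmap and require a 64-byte aligned memory operand (else #GP); both
 * details are presumably enforced by the iemCImpl_xsave/iemCImpl_xrstor
 * workers called above.
 */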
6600/** Opcode 0x0f 0xae mem/6. */
6601FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6602
6603/**
6604 * @opmaps grp15
6605 * @opcode !11/7
6606 * @oppfx none
6607 * @opcpuid clfsh
6608 * @opgroup og_cachectl
6609 * @optest op1=1 ->
6610 */
6611FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6612{
6613 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6614 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6615 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6616
6617 IEM_MC_BEGIN(2, 0);
6618 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6619 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6622 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6623 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6624 IEM_MC_END();
6625 return VINF_SUCCESS;
6626}
6627
6628/**
6629 * @opmaps grp15
6630 * @opcode !11/7
6631 * @oppfx 0x66
6632 * @opcpuid clflushopt
6633 * @opgroup og_cachectl
6634 * @optest op1=1 ->
6635 */
6636FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6637{
6638 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6639 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6640 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6641
6642 IEM_MC_BEGIN(2, 0);
6643 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6644 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6647 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6648 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6649 IEM_MC_END();
6650 return VINF_SUCCESS;
6651}
6652
6653
6654/** Opcode 0x0f 0xae 11b/5. */
6655FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6656{
6657 RT_NOREF_PV(bRm);
6658 IEMOP_MNEMONIC(lfence, "lfence");
6659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6660 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6661 return IEMOP_RAISE_INVALID_OPCODE();
6662
6663 IEM_MC_BEGIN(0, 0);
6664 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6665 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6666 else
6667 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6668 IEM_MC_ADVANCE_RIP();
6669 IEM_MC_END();
6670 return VINF_SUCCESS;
6671}
6672
6673
6674/** Opcode 0x0f 0xae 11b/6. */
6675FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6676{
6677 RT_NOREF_PV(bRm);
6678 IEMOP_MNEMONIC(mfence, "mfence");
6679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6680 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6681 return IEMOP_RAISE_INVALID_OPCODE();
6682
6683 IEM_MC_BEGIN(0, 0);
6684 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6685 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6686 else
6687 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6688 IEM_MC_ADVANCE_RIP();
6689 IEM_MC_END();
6690 return VINF_SUCCESS;
6691}
6692
6693
6694/** Opcode 0x0f 0xae 11b/7. */
6695FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6696{
6697 RT_NOREF_PV(bRm);
6698 IEMOP_MNEMONIC(sfence, "sfence");
6699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6700 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6701 return IEMOP_RAISE_INVALID_OPCODE();
6702
6703 IEM_MC_BEGIN(0, 0);
6704 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6705 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6706 else
6707 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6708 IEM_MC_ADVANCE_RIP();
6709 IEM_MC_END();
6710 return VINF_SUCCESS;
6711}
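/*
 * On hosts without SSE2 the three fences above fall back to
 * iemAImpl_alt_mem_fence.  Any serialising locked read-modify-write gives
 * comparable ordering; a sketch of such a fallback for 32-bit x86 hosts
 * (GCC inline-assembly syntax, an illustration only, not necessarily what
 * the assembly helper does):
 */
#if 0
static void iemSketchAltMemFence(void)
{
    /* A locked OR of zero into the stack top orders all earlier loads and stores. */
    __asm__ __volatile__("lock; orl $0, (%%esp)" ::: "memory", "cc");
}
#endif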
6712
6713
6714/** Opcode 0xf3 0x0f 0xae 11b/0. */
6715FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6716
6717/** Opcode 0xf3 0x0f 0xae 11b/1. */
6718FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6719
6720/** Opcode 0xf3 0x0f 0xae 11b/2. */
6721FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6722
6723/** Opcode 0xf3 0x0f 0xae 11b/3. */
6724FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6725
6726
6727/**
6728 * Group 15 jump table for register variant.
6729 */
6730IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6731{ /* pfx: none, 066h, 0f3h, 0f2h */
6732 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6733 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6734 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6735 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6736 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6737 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6738 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6739 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6740};
6741AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6742
6743
6744/**
6745 * Group 15 jump table for memory variant.
6746 */
6747IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6748{ /* pfx: none, 066h, 0f3h, 0f2h */
6749 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6750 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6751 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6752 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6753 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6754 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6755 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6756 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6757};
6758AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6759
6760
6761/** Opcode 0x0f 0xae. */
6762FNIEMOP_DEF(iemOp_Grp15)
6763{
6764 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
6765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6766 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6767 /* register, register */
6768 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6769 + pVCpu->iem.s.idxPrefix], bRm);
6770 /* memory, register */
6771 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6772 + pVCpu->iem.s.idxPrefix], bRm);
6773}
6774
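/*
 * The two tables are laid out with four prefix columns (none, 0x66, 0xf3,
 * 0xf2) per /r value, so the dispatch index above is simply reg * 4 +
 * prefix.  Spelled out (illustration only, not compiled):
 */
#if 0
static unsigned iemSketchGrp15Index(uint8_t bRm, unsigned idxPrefix)
{
    return ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
}
#endif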
6775
6776/** Opcode 0x0f 0xaf. */
6777FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6778{
6779 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6780 IEMOP_HLP_MIN_386();
6781 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6782 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6783}
6784
6785
6786/** Opcode 0x0f 0xb0. */
6787FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6788{
6789 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6790 IEMOP_HLP_MIN_486();
6791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6792
6793 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6794 {
6795 IEMOP_HLP_DONE_DECODING();
6796 IEM_MC_BEGIN(4, 0);
6797 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6798 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6799 IEM_MC_ARG(uint8_t, u8Src, 2);
6800 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6801
6802 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6803 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6804 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6805 IEM_MC_REF_EFLAGS(pEFlags);
6806 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6807 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6808 else
6809 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6810
6811 IEM_MC_ADVANCE_RIP();
6812 IEM_MC_END();
6813 }
6814 else
6815 {
6816 IEM_MC_BEGIN(4, 3);
6817 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6818 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6819 IEM_MC_ARG(uint8_t, u8Src, 2);
6820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6822 IEM_MC_LOCAL(uint8_t, u8Al);
6823
6824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6825 IEMOP_HLP_DONE_DECODING();
6826 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6827 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6828 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6829 IEM_MC_FETCH_EFLAGS(EFlags);
6830 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6831 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6832 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6833 else
6834 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6835
6836 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6837 IEM_MC_COMMIT_EFLAGS(EFlags);
6838 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6839 IEM_MC_ADVANCE_RIP();
6840 IEM_MC_END();
6841 }
6842 return VINF_SUCCESS;
6843}
6844
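/*
 * CMPXCHG in a nutshell: compare the accumulator with the destination; on
 * a match set ZF and store the source, otherwise clear ZF and load the old
 * destination value into the accumulator.  The memory form performs the
 * write even when the compare fails (storing back the old value), which is
 * why the locked variant always pairs the locked read with a locked write.
 * Sketch, kept out of the build:
 */
#if 0
static int iemSketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src)
{
    if (*pu8Al == *pu8Dst)
    {
        *pu8Dst = u8Src;
        return 1;               /* ZF=1 */
    }
    *pu8Al = *pu8Dst;
    return 0;                   /* ZF=0 */
}
#endif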
6845/** Opcode 0x0f 0xb1. */
6846FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6847{
6848 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6849 IEMOP_HLP_MIN_486();
6850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6851
6852 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6853 {
6854 IEMOP_HLP_DONE_DECODING();
6855 switch (pVCpu->iem.s.enmEffOpSize)
6856 {
6857 case IEMMODE_16BIT:
6858 IEM_MC_BEGIN(4, 0);
6859 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6860 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6861 IEM_MC_ARG(uint16_t, u16Src, 2);
6862 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6863
6864 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6865 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6866 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6867 IEM_MC_REF_EFLAGS(pEFlags);
6868 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6869 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6870 else
6871 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6872
6873 IEM_MC_ADVANCE_RIP();
6874 IEM_MC_END();
6875 return VINF_SUCCESS;
6876
6877 case IEMMODE_32BIT:
6878 IEM_MC_BEGIN(4, 0);
6879 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6880 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6881 IEM_MC_ARG(uint32_t, u32Src, 2);
6882 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6883
6884 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6885 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6886 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6887 IEM_MC_REF_EFLAGS(pEFlags);
6888 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6889 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6890 else
6891 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6892
6893 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6894 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6895 IEM_MC_ADVANCE_RIP();
6896 IEM_MC_END();
6897 return VINF_SUCCESS;
6898
6899 case IEMMODE_64BIT:
6900 IEM_MC_BEGIN(4, 0);
6901 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6902 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6903#ifdef RT_ARCH_X86
6904 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6905#else
6906 IEM_MC_ARG(uint64_t, u64Src, 2);
6907#endif
6908 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6909
6910 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6911 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6912 IEM_MC_REF_EFLAGS(pEFlags);
6913#ifdef RT_ARCH_X86
6914 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6915 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6916 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6917 else
6918 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6919#else
6920 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6921 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6922 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6923 else
6924 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6925#endif
6926
6927 IEM_MC_ADVANCE_RIP();
6928 IEM_MC_END();
6929 return VINF_SUCCESS;
6930
6931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6932 }
6933 }
6934 else
6935 {
6936 switch (pVCpu->iem.s.enmEffOpSize)
6937 {
6938 case IEMMODE_16BIT:
6939 IEM_MC_BEGIN(4, 3);
6940 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6941 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6942 IEM_MC_ARG(uint16_t, u16Src, 2);
6943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6945 IEM_MC_LOCAL(uint16_t, u16Ax);
6946
6947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6948 IEMOP_HLP_DONE_DECODING();
6949 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6950 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6951 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6952 IEM_MC_FETCH_EFLAGS(EFlags);
6953 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6954 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6955 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6956 else
6957 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6958
6959 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6960 IEM_MC_COMMIT_EFLAGS(EFlags);
6961 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6962 IEM_MC_ADVANCE_RIP();
6963 IEM_MC_END();
6964 return VINF_SUCCESS;
6965
6966 case IEMMODE_32BIT:
6967 IEM_MC_BEGIN(4, 3);
6968 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6969 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6970 IEM_MC_ARG(uint32_t, u32Src, 2);
6971 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6973 IEM_MC_LOCAL(uint32_t, u32Eax);
6974
6975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6976 IEMOP_HLP_DONE_DECODING();
6977 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6978 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6979 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6980 IEM_MC_FETCH_EFLAGS(EFlags);
6981 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6982 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6983 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6984 else
6985 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6986
6987 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6988 IEM_MC_COMMIT_EFLAGS(EFlags);
6989 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6990 IEM_MC_ADVANCE_RIP();
6991 IEM_MC_END();
6992 return VINF_SUCCESS;
6993
6994 case IEMMODE_64BIT:
6995 IEM_MC_BEGIN(4, 3);
6996 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6997 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6998#ifdef RT_ARCH_X86
6999 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7000#else
7001 IEM_MC_ARG(uint64_t, u64Src, 2);
7002#endif
7003 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7005 IEM_MC_LOCAL(uint64_t, u64Rax);
7006
7007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7008 IEMOP_HLP_DONE_DECODING();
7009 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7010 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7011 IEM_MC_FETCH_EFLAGS(EFlags);
7012 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7013#ifdef RT_ARCH_X86
7014 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7015 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7016 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7017 else
7018 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7019#else
7020 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7021 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7022 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7023 else
7024 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7025#endif
7026
7027 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7028 IEM_MC_COMMIT_EFLAGS(EFlags);
7029 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7030 IEM_MC_ADVANCE_RIP();
7031 IEM_MC_END();
7032 return VINF_SUCCESS;
7033
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 }
7037}
7038
7039
7040FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7041{
7042 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7043 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7044
7045 switch (pVCpu->iem.s.enmEffOpSize)
7046 {
7047 case IEMMODE_16BIT:
7048 IEM_MC_BEGIN(5, 1);
7049 IEM_MC_ARG(uint16_t, uSel, 0);
7050 IEM_MC_ARG(uint16_t, offSeg, 1);
7051 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7052 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7053 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7054 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7057 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7058 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7059 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7060 IEM_MC_END();
7061 return VINF_SUCCESS;
7062
7063 case IEMMODE_32BIT:
7064 IEM_MC_BEGIN(5, 1);
7065 IEM_MC_ARG(uint16_t, uSel, 0);
7066 IEM_MC_ARG(uint32_t, offSeg, 1);
7067 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7068 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7069 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7070 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7073 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7074 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7075 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7076 IEM_MC_END();
7077 return VINF_SUCCESS;
7078
7079 case IEMMODE_64BIT:
7080 IEM_MC_BEGIN(5, 1);
7081 IEM_MC_ARG(uint16_t, uSel, 0);
7082 IEM_MC_ARG(uint64_t, offSeg, 1);
7083 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7084 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7085 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7086 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7089 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7090 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7091 else
7092 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7093 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7094 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7095 IEM_MC_END();
7096 return VINF_SUCCESS;
7097
7098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7099 }
7100}
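
/* Illustrative sketch, not part of the original source: plain C showing the
   far-pointer memory layout the helper above decodes for LSS/LFS/LGS -- the
   offset comes first, followed by a 16-bit selector at displacement 2, 4 or
   8 depending on the effective operand size. The function name is made up
   for this example. */
#if 0
#include <stdint.h>
#include <string.h>

/* 32-bit operand size case: 4-byte offset followed by a 2-byte selector. */
static void splitFarPtr32(uint8_t const *pbMem, uint32_t *puOffSeg, uint16_t *puSel)
{
    memcpy(puOffSeg, pbMem, sizeof(*puOffSeg));     /* offset at displacement 0 */
    memcpy(puSel, pbMem + 4, sizeof(*puSel));       /* selector at displacement 4 */
}
#endif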
7101
7102
7103/** Opcode 0x0f 0xb2. */
7104FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7105{
7106 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7107 IEMOP_HLP_MIN_386();
7108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7109 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7110 return IEMOP_RAISE_INVALID_OPCODE();
7111 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7112}
7113
7114
7115/** Opcode 0x0f 0xb3. */
7116FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7117{
7118 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7119 IEMOP_HLP_MIN_386();
7120 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7121}
7122
7123
7124/** Opcode 0x0f 0xb4. */
7125FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7126{
7127 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7128 IEMOP_HLP_MIN_386();
7129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7130 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7131 return IEMOP_RAISE_INVALID_OPCODE();
7132 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7133}
7134
7135
7136/** Opcode 0x0f 0xb5. */
7137FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7138{
7139 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7140 IEMOP_HLP_MIN_386();
7141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7143 return IEMOP_RAISE_INVALID_OPCODE();
7144 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7145}
7146
7147
7148/** Opcode 0x0f 0xb6. */
7149FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7150{
7151 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7152 IEMOP_HLP_MIN_386();
7153
7154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7155
7156 /*
7157 * If rm is denoting a register, no more instruction bytes.
7158 */
7159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7160 {
7161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7162 switch (pVCpu->iem.s.enmEffOpSize)
7163 {
7164 case IEMMODE_16BIT:
7165 IEM_MC_BEGIN(0, 1);
7166 IEM_MC_LOCAL(uint16_t, u16Value);
7167 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7168 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7169 IEM_MC_ADVANCE_RIP();
7170 IEM_MC_END();
7171 return VINF_SUCCESS;
7172
7173 case IEMMODE_32BIT:
7174 IEM_MC_BEGIN(0, 1);
7175 IEM_MC_LOCAL(uint32_t, u32Value);
7176 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7177 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7178 IEM_MC_ADVANCE_RIP();
7179 IEM_MC_END();
7180 return VINF_SUCCESS;
7181
7182 case IEMMODE_64BIT:
7183 IEM_MC_BEGIN(0, 1);
7184 IEM_MC_LOCAL(uint64_t, u64Value);
7185 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7186 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7187 IEM_MC_ADVANCE_RIP();
7188 IEM_MC_END();
7189 return VINF_SUCCESS;
7190
7191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7192 }
7193 }
7194 else
7195 {
7196 /*
7197 * We're loading a register from memory.
7198 */
7199 switch (pVCpu->iem.s.enmEffOpSize)
7200 {
7201 case IEMMODE_16BIT:
7202 IEM_MC_BEGIN(0, 2);
7203 IEM_MC_LOCAL(uint16_t, u16Value);
7204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7207 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7208 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7209 IEM_MC_ADVANCE_RIP();
7210 IEM_MC_END();
7211 return VINF_SUCCESS;
7212
7213 case IEMMODE_32BIT:
7214 IEM_MC_BEGIN(0, 2);
7215 IEM_MC_LOCAL(uint32_t, u32Value);
7216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7219 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7220 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7221 IEM_MC_ADVANCE_RIP();
7222 IEM_MC_END();
7223 return VINF_SUCCESS;
7224
7225 case IEMMODE_64BIT:
7226 IEM_MC_BEGIN(0, 2);
7227 IEM_MC_LOCAL(uint64_t, u64Value);
7228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7231 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7232 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7233 IEM_MC_ADVANCE_RIP();
7234 IEM_MC_END();
7235 return VINF_SUCCESS;
7236
7237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7238 }
7239 }
7240}
7241
7242
7243/** Opcode 0x0f 0xb7. */
7244FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7245{
7246 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7247 IEMOP_HLP_MIN_386();
7248
7249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7250
7251 /** @todo Not entirely sure how the operand size prefix is handled here,
7252 * assuming that it will be ignored. Would be nice to have a few
7253 * tests for this. */
7254 /*
7255 * If rm is denoting a register, no more instruction bytes.
7256 */
7257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7258 {
7259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7260 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7261 {
7262 IEM_MC_BEGIN(0, 1);
7263 IEM_MC_LOCAL(uint32_t, u32Value);
7264 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7265 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7266 IEM_MC_ADVANCE_RIP();
7267 IEM_MC_END();
7268 }
7269 else
7270 {
7271 IEM_MC_BEGIN(0, 1);
7272 IEM_MC_LOCAL(uint64_t, u64Value);
7273 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7274 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7275 IEM_MC_ADVANCE_RIP();
7276 IEM_MC_END();
7277 }
7278 }
7279 else
7280 {
7281 /*
7282 * We're loading a register from memory.
7283 */
7284 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7285 {
7286 IEM_MC_BEGIN(0, 2);
7287 IEM_MC_LOCAL(uint32_t, u32Value);
7288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7291 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7292 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7293 IEM_MC_ADVANCE_RIP();
7294 IEM_MC_END();
7295 }
7296 else
7297 {
7298 IEM_MC_BEGIN(0, 2);
7299 IEM_MC_LOCAL(uint64_t, u64Value);
7300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7303 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7304 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7305 IEM_MC_ADVANCE_RIP();
7306 IEM_MC_END();
7307 }
7308 }
7309 return VINF_SUCCESS;
7310}
7311
7312
7313/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7314FNIEMOP_UD_STUB(iemOp_jmpe);
7315/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7316FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7317
7318
7319/**
7320 * @opcode 0xb9
7321 * @opinvalid intel-modrm
7322 * @optest ->
7323 */
7324FNIEMOP_DEF(iemOp_Grp10)
7325{
7326 /*
7327 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel also
7328 * decodes the ModR/M byte. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7329 */
7330 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7331 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
7332 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7333}
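
/* Illustrative sketch, not part of the original source: the decode difference
   described above reduced to plain C. Intel consumes the ModR/M byte (and,
   for memory forms, its SIB/displacement bytes) before raising #UD, while
   AMD stops at the 0xb9 opcode byte itself; forwarding to
   iemOp_InvalidNeedRM models the Intel behaviour. Only the register form is
   shown and the function name is made up. */
#if 0
#include <stddef.h>

/* Returns how many bytes a register-form UD1 (0x0f 0xb9 /r) occupies. */
static size_t ud1RegFormLength(int fIntel)
{
    return fIntel ? 3 : 2;  /* Intel also eats the ModR/M byte */
}
#endif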
7334
7335
7336/** Opcode 0x0f 0xba. */
7337FNIEMOP_DEF(iemOp_Grp8)
7338{
7339 IEMOP_HLP_MIN_386();
7340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7341 PCIEMOPBINSIZES pImpl;
7342 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7343 {
7344 case 0: case 1: case 2: case 3:
7345 /* Both AMD and Intel want full modr/m decoding and imm8. */
7346 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7347 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7348 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7349 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7350 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7352 }
7353 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7354
7355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7356 {
7357 /* register destination. */
7358 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7360
7361 switch (pVCpu->iem.s.enmEffOpSize)
7362 {
7363 case IEMMODE_16BIT:
7364 IEM_MC_BEGIN(3, 0);
7365 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7366 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7367 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7368
7369 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7370 IEM_MC_REF_EFLAGS(pEFlags);
7371 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7372
7373 IEM_MC_ADVANCE_RIP();
7374 IEM_MC_END();
7375 return VINF_SUCCESS;
7376
7377 case IEMMODE_32BIT:
7378 IEM_MC_BEGIN(3, 0);
7379 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7380 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7381 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7382
7383 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7384 IEM_MC_REF_EFLAGS(pEFlags);
7385 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7386
7387 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7388 IEM_MC_ADVANCE_RIP();
7389 IEM_MC_END();
7390 return VINF_SUCCESS;
7391
7392 case IEMMODE_64BIT:
7393 IEM_MC_BEGIN(3, 0);
7394 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7395 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7397
7398 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7399 IEM_MC_REF_EFLAGS(pEFlags);
7400 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7401
7402 IEM_MC_ADVANCE_RIP();
7403 IEM_MC_END();
7404 return VINF_SUCCESS;
7405
7406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7407 }
7408 }
7409 else
7410 {
7411 /* memory destination. */
7412
7413 uint32_t fAccess;
7414 if (pImpl->pfnLockedU16)
7415 fAccess = IEM_ACCESS_DATA_RW;
7416 else /* BT */
7417 fAccess = IEM_ACCESS_DATA_R;
7418
7419 /** @todo test negative bit offsets! */
7420 switch (pVCpu->iem.s.enmEffOpSize)
7421 {
7422 case IEMMODE_16BIT:
7423 IEM_MC_BEGIN(3, 1);
7424 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7425 IEM_MC_ARG(uint16_t, u16Src, 1);
7426 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7428
7429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7430 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7431 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7432 if (pImpl->pfnLockedU16)
7433 IEMOP_HLP_DONE_DECODING();
7434 else
7435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7436 IEM_MC_FETCH_EFLAGS(EFlags);
7437 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7438 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7439 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7440 else
7441 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7442 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7443
7444 IEM_MC_COMMIT_EFLAGS(EFlags);
7445 IEM_MC_ADVANCE_RIP();
7446 IEM_MC_END();
7447 return VINF_SUCCESS;
7448
7449 case IEMMODE_32BIT:
7450 IEM_MC_BEGIN(3, 1);
7451 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7452 IEM_MC_ARG(uint32_t, u32Src, 1);
7453 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7455
7456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7457 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7458 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7459 if (pImpl->pfnLockedU16)
7460 IEMOP_HLP_DONE_DECODING();
7461 else
7462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7463 IEM_MC_FETCH_EFLAGS(EFlags);
7464 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7465 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7466 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7467 else
7468 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7469 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7470
7471 IEM_MC_COMMIT_EFLAGS(EFlags);
7472 IEM_MC_ADVANCE_RIP();
7473 IEM_MC_END();
7474 return VINF_SUCCESS;
7475
7476 case IEMMODE_64BIT:
7477 IEM_MC_BEGIN(3, 1);
7478 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7479 IEM_MC_ARG(uint64_t, u64Src, 1);
7480 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7482
7483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7484 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7485 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7486 if (pImpl->pfnLockedU16)
7487 IEMOP_HLP_DONE_DECODING();
7488 else
7489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7490 IEM_MC_FETCH_EFLAGS(EFlags);
7491 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7492 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7493 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7494 else
7495 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7496 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7497
7498 IEM_MC_COMMIT_EFLAGS(EFlags);
7499 IEM_MC_ADVANCE_RIP();
7500 IEM_MC_END();
7501 return VINF_SUCCESS;
7502
7503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7504 }
7505 }
7506}
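
/* Illustrative sketch, not part of the original source: how the immediate bit
   offset is masked above (u8Bit & 0x0f / 0x1f / 0x3f), shown for the 32-bit
   BT Ev,Ib case in plain C. EFLAGS handling beyond the returned CF value is
   omitted and the function name is made up. */
#if 0
#include <stdint.h>

static unsigned btImm32(uint32_t uDst, uint8_t u8Bit)
{
    unsigned const iBit = u8Bit & 0x1f;     /* offset wraps modulo the operand width */
    return (uDst >> iBit) & 1;              /* CF receives the selected bit */
}
#endif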
7507
7508
7509/** Opcode 0x0f 0xbb. */
7510FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7511{
7512 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7513 IEMOP_HLP_MIN_386();
7514 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7515}
7516
7517
7518/** Opcode 0x0f 0xbc. */
7519FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7520{
7521 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7522 IEMOP_HLP_MIN_386();
7523 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7524 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7525}
7526
7527
7528/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7529FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7530
7531
7532/** Opcode 0x0f 0xbd. */
7533FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7534{
7535 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7536 IEMOP_HLP_MIN_386();
7537 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7538 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7539}
7540
7541
7542/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7543FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7544
7545
7546/** Opcode 0x0f 0xbe. */
7547FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7548{
7549 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7550 IEMOP_HLP_MIN_386();
7551
7552 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7553
7554 /*
7555 * If rm is denoting a register, no more instruction bytes.
7556 */
7557 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7558 {
7559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7560 switch (pVCpu->iem.s.enmEffOpSize)
7561 {
7562 case IEMMODE_16BIT:
7563 IEM_MC_BEGIN(0, 1);
7564 IEM_MC_LOCAL(uint16_t, u16Value);
7565 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7566 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7567 IEM_MC_ADVANCE_RIP();
7568 IEM_MC_END();
7569 return VINF_SUCCESS;
7570
7571 case IEMMODE_32BIT:
7572 IEM_MC_BEGIN(0, 1);
7573 IEM_MC_LOCAL(uint32_t, u32Value);
7574 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7575 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7576 IEM_MC_ADVANCE_RIP();
7577 IEM_MC_END();
7578 return VINF_SUCCESS;
7579
7580 case IEMMODE_64BIT:
7581 IEM_MC_BEGIN(0, 1);
7582 IEM_MC_LOCAL(uint64_t, u64Value);
7583 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7584 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7585 IEM_MC_ADVANCE_RIP();
7586 IEM_MC_END();
7587 return VINF_SUCCESS;
7588
7589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7590 }
7591 }
7592 else
7593 {
7594 /*
7595 * We're loading a register from memory.
7596 */
7597 switch (pVCpu->iem.s.enmEffOpSize)
7598 {
7599 case IEMMODE_16BIT:
7600 IEM_MC_BEGIN(0, 2);
7601 IEM_MC_LOCAL(uint16_t, u16Value);
7602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7605 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7606 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7607 IEM_MC_ADVANCE_RIP();
7608 IEM_MC_END();
7609 return VINF_SUCCESS;
7610
7611 case IEMMODE_32BIT:
7612 IEM_MC_BEGIN(0, 2);
7613 IEM_MC_LOCAL(uint32_t, u32Value);
7614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7617 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7618 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7619 IEM_MC_ADVANCE_RIP();
7620 IEM_MC_END();
7621 return VINF_SUCCESS;
7622
7623 case IEMMODE_64BIT:
7624 IEM_MC_BEGIN(0, 2);
7625 IEM_MC_LOCAL(uint64_t, u64Value);
7626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7629 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7630 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7631 IEM_MC_ADVANCE_RIP();
7632 IEM_MC_END();
7633 return VINF_SUCCESS;
7634
7635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7636 }
7637 }
7638}
7639
7640
7641/** Opcode 0x0f 0xbf. */
7642FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7643{
7644 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7645 IEMOP_HLP_MIN_386();
7646
7647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7648
7649 /** @todo Not entirely sure how the operand size prefix is handled here,
7650 * assuming that it will be ignored. Would be nice to have a few
7651 * tests for this. */
7652 /*
7653 * If rm is denoting a register, no more instruction bytes.
7654 */
7655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7656 {
7657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7658 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7659 {
7660 IEM_MC_BEGIN(0, 1);
7661 IEM_MC_LOCAL(uint32_t, u32Value);
7662 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7663 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7664 IEM_MC_ADVANCE_RIP();
7665 IEM_MC_END();
7666 }
7667 else
7668 {
7669 IEM_MC_BEGIN(0, 1);
7670 IEM_MC_LOCAL(uint64_t, u64Value);
7671 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7672 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7673 IEM_MC_ADVANCE_RIP();
7674 IEM_MC_END();
7675 }
7676 }
7677 else
7678 {
7679 /*
7680 * We're loading a register from memory.
7681 */
7682 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7683 {
7684 IEM_MC_BEGIN(0, 2);
7685 IEM_MC_LOCAL(uint32_t, u32Value);
7686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7689 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7690 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7691 IEM_MC_ADVANCE_RIP();
7692 IEM_MC_END();
7693 }
7694 else
7695 {
7696 IEM_MC_BEGIN(0, 2);
7697 IEM_MC_LOCAL(uint64_t, u64Value);
7698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7701 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7702 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7703 IEM_MC_ADVANCE_RIP();
7704 IEM_MC_END();
7705 }
7706 }
7707 return VINF_SUCCESS;
7708}
7709
7710
7711/** Opcode 0x0f 0xc0. */
7712FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7713{
7714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7715 IEMOP_HLP_MIN_486();
7716 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7717
7718 /*
7719 * If rm is denoting a register, no more instruction bytes.
7720 */
7721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7722 {
7723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7724
7725 IEM_MC_BEGIN(3, 0);
7726 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7727 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7728 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7729
7730 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7731 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7732 IEM_MC_REF_EFLAGS(pEFlags);
7733 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7734
7735 IEM_MC_ADVANCE_RIP();
7736 IEM_MC_END();
7737 }
7738 else
7739 {
7740 /*
7741 * We're accessing memory.
7742 */
7743 IEM_MC_BEGIN(3, 3);
7744 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7745 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7746 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7747 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7749
7750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7751 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7752 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7753 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7754 IEM_MC_FETCH_EFLAGS(EFlags);
7755 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7756 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7757 else
7758 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7759
7760 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7761 IEM_MC_COMMIT_EFLAGS(EFlags);
7762 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 return VINF_SUCCESS;
7766 }
7767 return VINF_SUCCESS;
7768}
7769
7770
7771/** Opcode 0x0f 0xc1. */
7772FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7773{
7774 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7775 IEMOP_HLP_MIN_486();
7776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7777
7778 /*
7779 * If rm is denoting a register, no more instruction bytes.
7780 */
7781 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7782 {
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784
7785 switch (pVCpu->iem.s.enmEffOpSize)
7786 {
7787 case IEMMODE_16BIT:
7788 IEM_MC_BEGIN(3, 0);
7789 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7790 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7791 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7792
7793 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7794 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7795 IEM_MC_REF_EFLAGS(pEFlags);
7796 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7797
7798 IEM_MC_ADVANCE_RIP();
7799 IEM_MC_END();
7800 return VINF_SUCCESS;
7801
7802 case IEMMODE_32BIT:
7803 IEM_MC_BEGIN(3, 0);
7804 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7805 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7806 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7807
7808 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7809 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7810 IEM_MC_REF_EFLAGS(pEFlags);
7811 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7812
7813 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7814 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7815 IEM_MC_ADVANCE_RIP();
7816 IEM_MC_END();
7817 return VINF_SUCCESS;
7818
7819 case IEMMODE_64BIT:
7820 IEM_MC_BEGIN(3, 0);
7821 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7822 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7823 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7824
7825 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7826 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7827 IEM_MC_REF_EFLAGS(pEFlags);
7828 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7829
7830 IEM_MC_ADVANCE_RIP();
7831 IEM_MC_END();
7832 return VINF_SUCCESS;
7833
7834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7835 }
7836 }
7837 else
7838 {
7839 /*
7840 * We're accessing memory.
7841 */
7842 switch (pVCpu->iem.s.enmEffOpSize)
7843 {
7844 case IEMMODE_16BIT:
7845 IEM_MC_BEGIN(3, 3);
7846 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7847 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7848 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7849 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7851
7852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7853 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7854 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7855 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7856 IEM_MC_FETCH_EFLAGS(EFlags);
7857 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7858 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7859 else
7860 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7861
7862 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7863 IEM_MC_COMMIT_EFLAGS(EFlags);
7864 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7865 IEM_MC_ADVANCE_RIP();
7866 IEM_MC_END();
7867 return VINF_SUCCESS;
7868
7869 case IEMMODE_32BIT:
7870 IEM_MC_BEGIN(3, 3);
7871 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7872 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7873 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7874 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7876
7877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7878 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7879 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7880 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7881 IEM_MC_FETCH_EFLAGS(EFlags);
7882 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7883 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7884 else
7885 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7886
7887 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7888 IEM_MC_COMMIT_EFLAGS(EFlags);
7889 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7890 IEM_MC_ADVANCE_RIP();
7891 IEM_MC_END();
7892 return VINF_SUCCESS;
7893
7894 case IEMMODE_64BIT:
7895 IEM_MC_BEGIN(3, 3);
7896 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7897 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7898 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7899 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7901
7902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7903 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7904 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7905 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7906 IEM_MC_FETCH_EFLAGS(EFlags);
7907 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7908 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7909 else
7910 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7911
7912 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7913 IEM_MC_COMMIT_EFLAGS(EFlags);
7914 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7915 IEM_MC_ADVANCE_RIP();
7916 IEM_MC_END();
7917 return VINF_SUCCESS;
7918
7919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7920 }
7921 }
7922}
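
/* Illustrative sketch, not part of the original source: the exchange-and-add
   performed by the iemAImpl_xadd_uNN workers invoked above, in plain C for
   the 32-bit case. EFLAGS updating is omitted and the function name is made
   up. */
#if 0
#include <stdint.h>

static void xadd32(uint32_t *puDst, uint32_t *puReg)
{
    uint32_t const uTmp = *puDst;   /* remember the old destination value */
    *puDst += *puReg;               /* the destination receives the sum */
    *puReg  = uTmp;                 /* the register receives the old value */
}
#endif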
7923
7924
7925/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7926FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7927/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7928FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7929/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7930FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7931/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7932FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7933
7934
7935/** Opcode 0x0f 0xc3. */
7936FNIEMOP_DEF(iemOp_movnti_My_Gy)
7937{
7938 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7939
7940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7941
7942 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7943 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7944 {
7945 switch (pVCpu->iem.s.enmEffOpSize)
7946 {
7947 case IEMMODE_32BIT:
7948 IEM_MC_BEGIN(0, 2);
7949 IEM_MC_LOCAL(uint32_t, u32Value);
7950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7951
7952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7955 return IEMOP_RAISE_INVALID_OPCODE();
7956
7957 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7958 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 break;
7962
7963 case IEMMODE_64BIT:
7964 IEM_MC_BEGIN(0, 2);
7965 IEM_MC_LOCAL(uint64_t, u64Value);
7966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7967
7968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7970 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7971 return IEMOP_RAISE_INVALID_OPCODE();
7972
7973 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7974 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7975 IEM_MC_ADVANCE_RIP();
7976 IEM_MC_END();
7977 break;
7978
7979 case IEMMODE_16BIT:
7980 /** @todo check this form. */
7981 return IEMOP_RAISE_INVALID_OPCODE();
7982 }
7983 }
7984 else
7985 return IEMOP_RAISE_INVALID_OPCODE();
7986 return VINF_SUCCESS;
7987}
7988/* Opcode 0x66 0x0f 0xc3 - invalid */
7989/* Opcode 0xf3 0x0f 0xc3 - invalid */
7990/* Opcode 0xf2 0x0f 0xc3 - invalid */
7991
7992/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7993FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7994/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7995FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7996/* Opcode 0xf3 0x0f 0xc4 - invalid */
7997/* Opcode 0xf2 0x0f 0xc4 - invalid */
7998
7999/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8000FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8001/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8002FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8003/* Opcode 0xf3 0x0f 0xc5 - invalid */
8004/* Opcode 0xf2 0x0f 0xc5 - invalid */
8005
8006/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8007FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8008/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8009FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8010/* Opcode 0xf3 0x0f 0xc6 - invalid */
8011/* Opcode 0xf2 0x0f 0xc6 - invalid */
8012
8013
8014/** Opcode 0x0f 0xc7 !11/1. */
8015FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8016{
8017 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8018
8019 IEM_MC_BEGIN(4, 3);
8020 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8021 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8022 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8023 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8024 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8025 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8027
8028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8029 IEMOP_HLP_DONE_DECODING();
8030 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8031
8032 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8033 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8034 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8035
8036 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8037 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8038 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8039
8040 IEM_MC_FETCH_EFLAGS(EFlags);
8041 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8042 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8043 else
8044 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8045
8046 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8047 IEM_MC_COMMIT_EFLAGS(EFlags);
8048 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8049 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8050 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8051 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8052 IEM_MC_ENDIF();
8053 IEM_MC_ADVANCE_RIP();
8054
8055 IEM_MC_END();
8056 return VINF_SUCCESS;
8057}
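
/* Illustrative sketch, not part of the original source: the compare/exchange
   the iemAImpl_cmpxchg8b worker above performs, in plain C. Atomicity and
   all EFLAGS handling except the ZF result are omitted; the function name is
   made up. */
#if 0
#include <stdbool.h>
#include <stdint.h>

static bool cmpxchg8b(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;       /* equal: store ECX:EBX, ZF=1 */
        return true;
    }
    *pu64EaxEdx = *pu64Mem;         /* not equal: load into EDX:EAX, ZF=0 */
    return false;
}
#endif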
8058
8059
8060/** Opcode REX.W 0x0f 0xc7 !11/1. */
8061FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8062{
8063 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8064 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8065 {
8066#if 0
8067 RT_NOREF(bRm);
8068 IEMOP_BITCH_ABOUT_STUB();
8069 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8070#else
8071 IEM_MC_BEGIN(4, 3);
8072 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8073 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8074 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8075 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8076 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8077 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8079
8080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8081 IEMOP_HLP_DONE_DECODING();
8082 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8083 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8084
8085 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8086 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8087 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8088
8089 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8090 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8091 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8092
8093 IEM_MC_FETCH_EFLAGS(EFlags);
8094# ifdef RT_ARCH_AMD64
8095 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8096 {
8097 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8098 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8099 else
8100 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8101 }
8102 else
8103# endif
8104 {
8105 /* Note! The fallback for 32-bit systems and systems without CX16 consists
8106 of multiple accesses which are not at all atomic. That works fine in a uni-CPU guest
8107 configuration (ignoring DMA). If guest SMP is active we have no choice
8108 but to use a rendezvous callback here. Sigh. */
8109 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8110 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8111 else
8112 {
8113 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8114 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8115 }
8116 }
8117
8118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8119 IEM_MC_COMMIT_EFLAGS(EFlags);
8120 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8121 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8122 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8123 IEM_MC_ENDIF();
8124 IEM_MC_ADVANCE_RIP();
8125
8126 IEM_MC_END();
8127 return VINF_SUCCESS;
8128#endif
8129 }
8130 Log(("cmpxchg16b -> #UD\n"));
8131 return IEMOP_RAISE_INVALID_OPCODE();
8132}
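
/* Illustrative sketch, not part of the original source: why the fallback path
   above is only taken directly on uni-CPU guests. Without host CX16 support
   the 16-byte compare/exchange decomposes into several plain loads and
   stores, which another vCPU could observe half done, hence the rendezvous
   variant for SMP guests. Plain C with the atomicity deliberately absent;
   the type and function names are made up. */
#if 0
#include <stdbool.h>
#include <stdint.h>

typedef struct { uint64_t Lo, Hi; } UINT128SKETCH;

static bool cmpxchg16bNonAtomic(UINT128SKETCH *pMem, UINT128SKETCH *pRaxRdx,
                                UINT128SKETCH const *pRbxRcx)
{
    if (pMem->Lo == pRaxRdx->Lo && pMem->Hi == pRaxRdx->Hi)
    {
        *pMem = *pRbxRcx;           /* two separate stores - not atomic! */
        return true;                /* ZF=1 */
    }
    *pRaxRdx = *pMem;               /* two separate loads - not atomic! */
    return false;                   /* ZF=0 */
}
#endif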
8133
8134FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8135{
8136 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8137 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8138 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8139}
8140
8141/** Opcode 0x0f 0xc7 11/6. */
8142FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8143
8144/** Opcode 0x0f 0xc7 !11/6. */
8145FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8146
8147/** Opcode 0x66 0x0f 0xc7 !11/6. */
8148FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8149
8150/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8151FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8152
8153/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8154FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8155
8156/** Opcode 0x0f 0xc7 11/7. */
8157FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8158
8159
8160/**
8161 * Group 9 jump table for register variant.
8162 */
8163IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8164{ /* pfx: none, 066h, 0f3h, 0f2h */
8165 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8166 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8167 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8168 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8169 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8170 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8171 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8172 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8173};
8174AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8175
8176
8177/**
8178 * Group 9 jump table for memory variant.
8179 */
8180IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8181{ /* pfx: none, 066h, 0f3h, 0f2h */
8182 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8183 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8184 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8185 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8186 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8187 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8188 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8189 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8190};
8191AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8192
8193
8194/** Opcode 0x0f 0xc7. */
8195FNIEMOP_DEF(iemOp_Grp9)
8196{
8197 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8198 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8199 /* register, register */
8200 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8201 + pVCpu->iem.s.idxPrefix], bRm);
8202 /* memory, register */
8203 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8204 + pVCpu->iem.s.idxPrefix], bRm);
8205}
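
/* Illustrative sketch, not part of the original source: how the two group 9
   jump tables above are indexed -- four entries per ModR/M reg value, one
   for each operand prefix (none, 0x66, 0xf3, 0xf2) as selected by idxPrefix.
   The function name is made up. */
#if 0
#include <stdint.h>

static unsigned grp9TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> 3) & 7;   /* the ModR/M reg field, /0../7 */
    return iReg * 4 + idxPrefix;            /* row = reg, column = prefix */
}
#endif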
8206
8207
8208/**
8209 * Common 'bswap register' helper.
8210 */
8211FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8212{
8213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8214 switch (pVCpu->iem.s.enmEffOpSize)
8215 {
8216 case IEMMODE_16BIT:
8217 IEM_MC_BEGIN(1, 0);
8218 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8219 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8220 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8221 IEM_MC_ADVANCE_RIP();
8222 IEM_MC_END();
8223 return VINF_SUCCESS;
8224
8225 case IEMMODE_32BIT:
8226 IEM_MC_BEGIN(1, 0);
8227 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8228 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8229 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8230 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8231 IEM_MC_ADVANCE_RIP();
8232 IEM_MC_END();
8233 return VINF_SUCCESS;
8234
8235 case IEMMODE_64BIT:
8236 IEM_MC_BEGIN(1, 0);
8237 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8238 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8239 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8240 IEM_MC_ADVANCE_RIP();
8241 IEM_MC_END();
8242 return VINF_SUCCESS;
8243
8244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8245 }
8246}
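
/* Illustrative sketch, not part of the original source: the byte swapping the
   iemAImpl_bswap_uNN workers above perform, in plain C for the 32-bit case.
   Note how the 16-bit case above deliberately keeps the high dword, since
   only 32-bit operations implicitly zero bits 63:32 in 64-bit mode; the
   result of BSWAP with a 16-bit operand is officially undefined. The
   function name is made up. */
#if 0
#include <stdint.h>

static uint32_t bswap32(uint32_t u)
{
    return (u >> 24)
         | ((u >>  8) & UINT32_C(0x0000ff00))
         | ((u <<  8) & UINT32_C(0x00ff0000))
         | (u << 24);
}
#endif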
8247
8248
8249/** Opcode 0x0f 0xc8. */
8250FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8251{
8252 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8253 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8254 prefix, but REX.B appears to be the correct prefix. For a parallel
8255 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8256 IEMOP_HLP_MIN_486();
8257 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8258}
8259
8260
8261/** Opcode 0x0f 0xc9. */
8262FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8263{
8264 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8265 IEMOP_HLP_MIN_486();
8266 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8267}
8268
8269
8270/** Opcode 0x0f 0xca. */
8271FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8272{
8273 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8274 IEMOP_HLP_MIN_486();
8275 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8276}
8277
8278
8279/** Opcode 0x0f 0xcb. */
8280FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8281{
8282 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8283 IEMOP_HLP_MIN_486();
8284 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8285}
8286
8287
8288/** Opcode 0x0f 0xcc. */
8289FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8290{
8291 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8292 IEMOP_HLP_MIN_486();
8293 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8294}
8295
8296
8297/** Opcode 0x0f 0xcd. */
8298FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8299{
8300 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8301 IEMOP_HLP_MIN_486();
8302 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8303}
8304
8305
8306/** Opcode 0x0f 0xce. */
8307FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8308{
8309 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8310 IEMOP_HLP_MIN_486();
8311 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8312}
8313
8314
8315/** Opcode 0x0f 0xcf. */
8316FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8317{
8318 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8319 IEMOP_HLP_MIN_486();
8320 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8321}
8322
8323
8324/* Opcode 0x0f 0xd0 - invalid */
8325/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8326FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8327/* Opcode 0xf3 0x0f 0xd0 - invalid */
8328/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8329FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8330
8331/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8332FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8333/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8334FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8335/* Opcode 0xf3 0x0f 0xd1 - invalid */
8336/* Opcode 0xf2 0x0f 0xd1 - invalid */
8337
8338/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8339FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8340/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8341FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8342/* Opcode 0xf3 0x0f 0xd2 - invalid */
8343/* Opcode 0xf2 0x0f 0xd2 - invalid */
8344
8345/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8346FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8347/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8348FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8349/* Opcode 0xf3 0x0f 0xd3 - invalid */
8350/* Opcode 0xf2 0x0f 0xd3 - invalid */
8351
8352/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8353FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8354/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8355FNIEMOP_STUB(iemOp_paddq_Vx_W);
8356/* Opcode 0xf3 0x0f 0xd4 - invalid */
8357/* Opcode 0xf2 0x0f 0xd4 - invalid */
8358
8359/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8360FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8361/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8362FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8363/* Opcode 0xf3 0x0f 0xd5 - invalid */
8364/* Opcode 0xf2 0x0f 0xd5 - invalid */
8365
8366/* Opcode 0x0f 0xd6 - invalid */
8367
8368/**
8369 * @opcode 0xd6
8370 * @oppfx 0x66
8371 * @opcpuid sse2
8372 * @opgroup og_sse2_pcksclr_datamove
8373 * @opxcpttype none
8374 * @optest op1=-1 op2=2 -> op1=2
8375 * @optest op1=0 op2=-42 -> op1=-42
8376 */
8377FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8378{
8379 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8382 {
8383 /*
8384 * Register, register.
8385 */
8386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8387 IEM_MC_BEGIN(0, 2);
8388 IEM_MC_LOCAL(uint64_t, uSrc);
8389
8390 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8391 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8392
8393 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8394 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8395
8396 IEM_MC_ADVANCE_RIP();
8397 IEM_MC_END();
8398 }
8399 else
8400 {
8401 /*
8402 * Memory, register.
8403 */
8404 IEM_MC_BEGIN(0, 2);
8405 IEM_MC_LOCAL(uint64_t, uSrc);
8406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8407
8408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8410 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8411 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8412
8413 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8414 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8415
8416 IEM_MC_ADVANCE_RIP();
8417 IEM_MC_END();
8418 }
8419 return VINF_SUCCESS;
8420}
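
/* Illustrative sketch, not part of the original source: the two MOVQ Wq,Vq
   forms above in plain C -- a register destination is zero extended to 128
   bits, while a memory destination only receives the low quadword. The type
   and function names are made up. */
#if 0
#include <stdint.h>
#include <string.h>

typedef struct { uint64_t au64[2]; } XMMSKETCH;

static void movqToReg(XMMSKETCH *pDst, XMMSKETCH const *pSrc)
{
    pDst->au64[0] = pSrc->au64[0];  /* copy the low quadword */
    pDst->au64[1] = 0;              /* zero the high quadword */
}

static void movqToMem(uint8_t *pbMem, XMMSKETCH const *pSrc)
{
    memcpy(pbMem, &pSrc->au64[0], sizeof(uint64_t));    /* 64-bit store only */
}
#endif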
8421
8422
8423/**
8424 * @opcode 0xd6
8425 * @opcodesub 11 mr/reg
8426 * @oppfx f3
8427 * @opcpuid sse2
8428 * @opgroup og_sse2_simdint_datamove
8429 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8430 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8431 */
8432FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8433{
8434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8435 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8436 {
8437 /*
8438 * Register, register.
8439 */
8440 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8442 IEM_MC_BEGIN(0, 1);
8443 IEM_MC_LOCAL(uint64_t, uSrc);
8444
8445 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8446 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8447
8448 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8449 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8450 IEM_MC_FPU_TO_MMX_MODE();
8451
8452 IEM_MC_ADVANCE_RIP();
8453 IEM_MC_END();
8454 return VINF_SUCCESS;
8455 }
8456
8457 /**
8458 * @opdone
8459 * @opmnemonic udf30fd6mem
8460 * @opcode 0xd6
8461 * @opcodesub !11 mr/reg
8462 * @oppfx f3
8463 * @opunused intel-modrm
8464 * @opcpuid sse
8465 * @optest ->
8466 */
8467 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8468}
8469
8470
8471/**
8472 * @opcode 0xd6
8473 * @opcodesub 11 mr/reg
8474 * @oppfx f2
8475 * @opcpuid sse2
8476 * @opgroup og_sse2_simdint_datamove
8477 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8478 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8479 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8480 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8481 * @optest op1=-42 op2=0xfedcba9876543210
8482 * -> op1=0xfedcba9876543210 ftw=0xff
8483 */
8484FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8485{
8486 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8487 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8488 {
8489 /*
8490 * Register, register.
8491 */
8492 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
8493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8494 IEM_MC_BEGIN(0, 1);
8495 IEM_MC_LOCAL(uint64_t, uSrc);
8496
8497 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8498 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8499
8500 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8501 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8502 IEM_MC_FPU_TO_MMX_MODE();
8503
8504 IEM_MC_ADVANCE_RIP();
8505 IEM_MC_END();
8506 return VINF_SUCCESS;
8507 }
8508
8509 /**
8510 * @opdone
8511 * @opmnemonic udf20fd6mem
8512 * @opcode 0xd6
8513 * @opcodesub !11 mr/reg
8514 * @oppfx f2
8515 * @opunused intel-modrm
8516 * @opcpuid sse
8517 * @optest ->
8518 */
8519 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8520}
8521
8522/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8523FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8524{
8525 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8526 /** @todo testcase: Check that the instruction implicitly clears the high
8527 * bits in 64-bit mode. The REX.W only becomes necessary when VLMAX > 256
8528 * and opcode modifications are made to work with the whole width (not
8529 * just 128 bits). */
8530 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8531 /* Docs say register only. */
8532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8533 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8534 {
8535 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8536 IEM_MC_BEGIN(2, 0);
8537 IEM_MC_ARG(uint64_t *, pDst, 0);
8538 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8539 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8540 IEM_MC_PREPARE_FPU_USAGE();
8541 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8542 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8543 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8544 IEM_MC_ADVANCE_RIP();
8545 IEM_MC_END();
8546 return VINF_SUCCESS;
8547 }
8548 return IEMOP_RAISE_INVALID_OPCODE();
8549}
8550
8551 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8552FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8553{
8554 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
8555 /** @todo testcase: Check that the instruction implicitly clears the high
8556 * bits in 64-bit mode. The REX.W only becomes necessary when VLMAX > 256
8557 * and opcode modifications are made to work with the whole width (not
8558 * just 128 bits). */
8559 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8560 /* Docs say register only. */
8561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8563 {
8564 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8565 IEM_MC_BEGIN(2, 0);
8566 IEM_MC_ARG(uint64_t *, pDst, 0);
8567 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8569 IEM_MC_PREPARE_SSE_USAGE();
8570 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8571 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8572 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8573 IEM_MC_ADVANCE_RIP();
8574 IEM_MC_END();
8575 return VINF_SUCCESS;
8576 }
8577 return IEMOP_RAISE_INVALID_OPCODE();
8578}
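
/* Illustrative sketch, not part of the original source: what the
   iemAImpl_pmovmskb_uNN workers above compute -- one destination bit per
   source byte, taken from that byte's most significant bit. Plain C for the
   64-bit MMX case; the function name is made up. */
#if 0
#include <stdint.h>

static uint64_t pmovmskb64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;   /* bits 8 and up of the destination end up zero */
}
#endif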
8579
8580/* Opcode 0xf3 0x0f 0xd7 - invalid */
8581/* Opcode 0xf2 0x0f 0xd7 - invalid */
8582
8583
8584/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8585FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8586/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8587FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8588/* Opcode 0xf3 0x0f 0xd8 - invalid */
8589/* Opcode 0xf2 0x0f 0xd8 - invalid */
8590
8591/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8592FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8593/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8594FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8595/* Opcode 0xf3 0x0f 0xd9 - invalid */
8596/* Opcode 0xf2 0x0f 0xd9 - invalid */
8597
8598/** Opcode 0x0f 0xda - pminub Pq, Qq */
8599FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8600/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8601FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8602/* Opcode 0xf3 0x0f 0xda - invalid */
8603/* Opcode 0xf2 0x0f 0xda - invalid */
8604
8605/** Opcode 0x0f 0xdb - pand Pq, Qq */
8606FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8607/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8608FNIEMOP_STUB(iemOp_pand_Vx_W);
8609/* Opcode 0xf3 0x0f 0xdb - invalid */
8610/* Opcode 0xf2 0x0f 0xdb - invalid */
8611
8612/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8613FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8614/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8615FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8616/* Opcode 0xf3 0x0f 0xdc - invalid */
8617/* Opcode 0xf2 0x0f 0xdc - invalid */
8618
8619/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8620FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8621/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8622FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8623/* Opcode 0xf3 0x0f 0xdd - invalid */
8624/* Opcode 0xf2 0x0f 0xdd - invalid */
8625
8626/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8627FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8628/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8629FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8630/* Opcode 0xf3 0x0f 0xde - invalid */
8631/* Opcode 0xf2 0x0f 0xde - invalid */
8632
8633/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8634FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8635/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8636FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8637/* Opcode 0xf3 0x0f 0xdf - invalid */
8638/* Opcode 0xf2 0x0f 0xdf - invalid */
8639
8640/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8641FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8642/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8643FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8644/* Opcode 0xf3 0x0f 0xe0 - invalid */
8645/* Opcode 0xf2 0x0f 0xe0 - invalid */
8646
8647/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8648FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8649/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8650FNIEMOP_STUB(iemOp_psraw_Vx_W);
8651/* Opcode 0xf3 0x0f 0xe1 - invalid */
8652/* Opcode 0xf2 0x0f 0xe1 - invalid */
8653
8654/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8655FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8656/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8657FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8658/* Opcode 0xf3 0x0f 0xe2 - invalid */
8659/* Opcode 0xf2 0x0f 0xe2 - invalid */
8660
8661/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8662FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8663/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8664FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8665/* Opcode 0xf3 0x0f 0xe3 - invalid */
8666/* Opcode 0xf2 0x0f 0xe3 - invalid */
8667
8668/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8669FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8670/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8671FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8672/* Opcode 0xf3 0x0f 0xe4 - invalid */
8673/* Opcode 0xf2 0x0f 0xe4 - invalid */
8674
8675/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8676FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8677/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8678FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8679/* Opcode 0xf3 0x0f 0xe5 - invalid */
8680/* Opcode 0xf2 0x0f 0xe5 - invalid */
8681
8682/* Opcode 0x0f 0xe6 - invalid */
8683/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8684FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8685/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8686FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8687/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8688FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8689
8690
8691/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8692FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8693{
8694 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8696 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8697 {
8698 /* Register, memory. */
8699 IEM_MC_BEGIN(0, 2);
8700 IEM_MC_LOCAL(uint64_t, uSrc);
8701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8702
8703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8705 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8706 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8707
8708 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8709 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8710
8711 IEM_MC_ADVANCE_RIP();
8712 IEM_MC_END();
8713 return VINF_SUCCESS;
8714 }
8715 /* The register, register encoding is invalid. */
8716 return IEMOP_RAISE_INVALID_OPCODE();
8717}

/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register, memory. */
        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register, register encoding is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */

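/*
 * Note that both movntq and movntdq above exist only with a memory destination:
 * a ModRM mod field of 3 (register form) makes the encoding invalid.  movntdq
 * additionally requires a 16-byte aligned destination (see
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE above), whereas the 8-byte movntq store has
 * no alignment restriction.  A minimal sketch of the register-form test both
 * handlers perform (the helper name is illustrative, not part of IEM):
 */
#if 0 /* illustrative sketch only, not built */
static bool iemExampleModRmIsRegisterForm(uint8_t bRm)
{
    /* mod == 3 (both top bits of the ModRM byte set) selects a register operand. */
    return (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);
}
#endif
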

/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_STUB(iemOp_psubsb_Vx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_STUB(iemOp_por_Vx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}

/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}

/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

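/*
 * Both pxor handlers above defer to the common "full, full to full"
 * binary-operand decoders, passing g_iemAImpl_pxor as the worker.  The
 * operation itself is a plain bitwise XOR of the source into the destination;
 * a minimal sketch of the 128-bit case (the helper name is illustrative, not
 * the actual worker):
 */
#if 0 /* illustrative sketch only, not built */
static void iemExamplePxorU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    /* XOR the source into the destination, 64 bits at a time. */
    puDst->au64[0] ^= puSrc->au64[0];
    puDst->au64[1] ^= puSrc->au64[1];
}
#endif
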
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/* Opcode 0xf3 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_STUB(iemOp_psllw_Vx_W);
/* Opcode 0xf3 0x0f 0xf1 - invalid */
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf2 - invalid */
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf3 - invalid */
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
/* Opcode 0xf3 0x0f 0xf4 - invalid */
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf5 - invalid */
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf6 - invalid */
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf3 0x0f 0xf7 - invalid */
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_STUB(iemOp_psubb_Vx_W);
/* Opcode 0xf3 0x0f 0xf8 - invalid */
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xf9 - invalid */
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfa - invalid */
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_STUB(iemOp_psubq_Vx_W);
/* Opcode 0xf3 0x0f 0xfb - invalid */
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfc - invalid */
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
/* Opcode 0xf3 0x0f 0xfd - invalid */
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_STUB(iemOp_paddd_Vx_W);
/* Opcode 0xf3 0x0f 0xfe - invalid */
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - UD0 (any prefix) */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Intel CPUs consume the ModRM byte (and any addressing bytes) before raising \#UD. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
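
/*
 * On Intel, ud0 thus has a variable length: e.g. 0F FF C0 (ModRM selecting a
 * register) is three bytes, while 0F FF 80 44 33 22 11 (ModRM with a disp32,
 * 32-bit addressing) is seven.  AMD CPUs raise the invalid opcode exception
 * after the two opcode bytes alone.  These byte sequences are illustrative
 * encodings, not taken from the source.
 */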



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*              no prefix,              066h prefix,            f3h prefix,             f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps,    iemOp_movupd_Vpd_Wpd,   iemOp_movss_Vss_Wss,    iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps,    iemOp_movupd_Wpd_Vpd,   iemOp_movss_Wss_Vss,    iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq,      iemOp_movlpd_Mq_Vq,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx,    iemOp_unpcklpd_Vx_Wx,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx,    iemOp_unpckhpd_Vx_Wx,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq,      iemOp_movhpd_Mq_Vq,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd,         iemOp_mov_Rd_Cd,        iemOp_mov_Rd_Cd,        iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,         iemOp_mov_Rd_Dd,        iemOp_mov_Rd_Dd,        iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,         iemOp_mov_Cd_Rd,        iemOp_mov_Cd_Rd,        iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,         iemOp_mov_Dd_Rd,        iemOp_mov_Dd_Rd,        iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,         iemOp_mov_Rd_Td,        iemOp_mov_Rd_Td,        iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,           iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,         iemOp_mov_Td_Rd,        iemOp_mov_Td_Rd,        iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,           iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps,    iemOp_movapd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps,    iemOp_movapd_Wpd_Vpd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi,  iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey,  iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps,   iemOp_movntpd_Mpd_Vpd,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps,  iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss,  iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss,   iemOp_ucomisd_Vsd_Wsd,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss,    iemOp_comisd_Vsd_Wsd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups,   iemOp_movmskpd_Gy_Upd,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps,    iemOp_sqrtpd_Vpd_Wpd,   iemOp_sqrtss_Vss_Wss,   iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps,   iemOp_InvalidNeedRM,    iemOp_rsqrtss_Vss_Wss,  iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps,     iemOp_InvalidNeedRM,    iemOp_rcpss_Vss_Wss,    iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps,     iemOp_andpd_Vpd_Wpd,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps,    iemOp_andnpd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps,      iemOp_orpd_Vpd_Wpd,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps,     iemOp_xorpd_Vpd_Wpd,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps,     iemOp_addpd_Vpd_Wpd,    iemOp_addss_Vss_Wss,    iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps,     iemOp_mulpd_Vpd_Wpd,    iemOp_mulss_Vss_Wss,    iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps,  iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq,  iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps,     iemOp_subpd_Vpd_Wpd,    iemOp_subss_Vss_Wss,    iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps,     iemOp_minpd_Vpd_Wpd,    iemOp_minss_Vss_Wss,    iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps,     iemOp_divpd_Vpd_Wpd,    iemOp_divss_Vss_Wss,    iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps,     iemOp_maxpd_Vpd_Wpd,    iemOp_maxss_Vss_Wss,    iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd,   iemOp_punpcklbw_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd,   iemOp_punpcklwd_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd,   iemOp_punpckldq_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq,    iemOp_packsswb_Vx_Wx,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq,     iemOp_pcmpgtb_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq,     iemOp_pcmpgtw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq,     iemOp_pcmpgtd_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq,    iemOp_packuswb_Vx_W,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd,   iemOp_punpckhbw_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd,   iemOp_punpckhwd_Vx_Wx,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd,   iemOp_punpckhdq_Vx_W,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd,    iemOp_packssdw_Vx_Wx,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM,     iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM,     iemOp_punpckhqdq_Vx_W,  iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey,      iemOp_movd_q_Vy_Ey,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq,        iemOp_movdqa_Vx_Wx,     iemOp_movdqu_Vx_Wx,     iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib,   iemOp_pshufd_Vx_Wx_Ib,  iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq,     iemOp_pcmpeqb_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq,     iemOp_pcmpeqw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq,     iemOp_pcmpeqd_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms,              iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy,      iemOp_AmdGrp17,         iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM,     iemOp_haddpd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM,     iemOp_hsubpd_Vpd_Wpd,   iemOp_InvalidNeedRM,    iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd,      iemOp_movd_q_Ey_Vy,     iemOp_movq_Vq_Wq,       iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq,        iemOp_movdqa_Wx_Vx,     iemOp_movdqu_Wx_Vx,     iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe,              iemOp_InvalidNeedRM,    iemOp_popcnt_Gv_Ev,     iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev,         iemOp_bsf_Gv_Ev,        iemOp_tzcnt_Gv_Ev,      iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,         iemOp_bsr_Gv_Ev,        iemOp_lzcnt_Gv_Ev,      iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib,  iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib,   iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM,     iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM,    iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq,       iemOp_psrlw_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq,       iemOp_psrld_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq,       iemOp_psrlq_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq,       iemOp_paddq_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq,      iemOp_pmullw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM,     iemOp_movq_Wq_Vq,       iemOp_movq2dq_Vdq_Nq,   iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq,    iemOp_pmovmskb_Gd_Ux,   iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq,     iemOp_psubusb_Vx_W,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq,     iemOp_psubusw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq,      iemOp_pminub_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq,        iemOp_pand_Vx_W,        iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq,     iemOp_paddusb_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq,     iemOp_paddusw_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq,      iemOp_pmaxub_Vx_W,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq,       iemOp_pandn_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq,       iemOp_pavgb_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq,       iemOp_psraw_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq,       iemOp_psrad_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq,       iemOp_pavgw_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq,     iemOp_pmulhuw_Vx_W,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq,      iemOp_pmulhw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM,     iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd,  iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq,      iemOp_movntdq_Mx_Vx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq,      iemOp_psubsb_Vx_W,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq,      iemOp_psubsw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq,      iemOp_pminsw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq,         iemOp_por_Vx_W,         iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq,      iemOp_paddsb_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq,      iemOp_paddsw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq,      iemOp_pmaxsw_Vx_W,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq,        iemOp_pxor_Vx_Wx,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,    iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq,       iemOp_psllw_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq,       iemOp_pslld_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq,       iemOp_psllq_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq,     iemOp_pmuludq_Vx_W,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq,     iemOp_pmaddwd_Vx_Wx,    iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq,      iemOp_psadbw_Vx_Wx,     iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq,    iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq,       iemOp_psubb_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq,       iemOp_psubw_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq,       iemOp_psubd_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq,       iemOp_psubq_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq,       iemOp_paddb_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq,       iemOp_paddw_Vx_Wx,      iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq,       iemOp_paddd_Vx_W,       iemOp_InvalidNeedRM,    iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
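
/*
 * The map above holds four entries per opcode byte, one for each mandatory
 * prefix column (none, 066h, 0f3h, 0f2h); the AssertCompile pins the total at
 * 256 * 4 = 1024.  A minimal sketch of the lookup, assuming a prefix index of
 * 0..3 in the column order of the table (the helper name is illustrative, not
 * part of IEM):
 */
#if 0 /* illustrative sketch only, not built */
static PFNIEMOP iemExampleLookupTwoByte(uint8_t bOpcode, uint8_t idxPrefix)
{
    /* Four consecutive entries per opcode byte; idxPrefix selects the column. */
    return g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}
#endif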

/** @} */
