VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@67003

Last change on this file since 67003 was 67003, checked in by vboxsync, 8 years ago

IEM: movq Pq,Eq & movd Pd,Ed docs+tests+fixes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 325.4 KB
 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 67003 2017-05-22 10:03:15Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2017 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27/** Opcode 0x0f 0x00 /0. */
28FNIEMOPRM_DEF(iemOp_Grp6_sldt)
29{
30 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
31 IEMOP_HLP_MIN_286();
32 IEMOP_HLP_NO_REAL_OR_V86_MODE();
33
34 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
35 {
36 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
37 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
38 switch (pVCpu->iem.s.enmEffOpSize)
39 {
40 case IEMMODE_16BIT:
41 IEM_MC_BEGIN(0, 1);
42 IEM_MC_LOCAL(uint16_t, u16Ldtr);
43 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
44 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
45 IEM_MC_ADVANCE_RIP();
46 IEM_MC_END();
47 break;
48
49 case IEMMODE_32BIT:
50 IEM_MC_BEGIN(0, 1);
51 IEM_MC_LOCAL(uint32_t, u32Ldtr);
52 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
53 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
54 IEM_MC_ADVANCE_RIP();
55 IEM_MC_END();
56 break;
57
58 case IEMMODE_64BIT:
59 IEM_MC_BEGIN(0, 1);
60 IEM_MC_LOCAL(uint64_t, u64Ldtr);
61 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
62 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
63 IEM_MC_ADVANCE_RIP();
64 IEM_MC_END();
65 break;
66
67 IEM_NOT_REACHED_DEFAULT_CASE_RET();
68 }
69 }
70 else
71 {
72 IEM_MC_BEGIN(0, 2);
73 IEM_MC_LOCAL(uint16_t, u16Ldtr);
74 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
75 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
76 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
77 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
78 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
79 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
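/*
 * Illustrative sketch, not part of the original file: the mod==3 test used by
 * these decoders is just the top two bits of the ModR/M byte.  The helper and
 * its name are hypothetical; the field layout itself is architectural.
 */
#if 0
static void DecodeModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
{
    *pMod = (bRm >> 6) & 3; /* 3 = register operand, 0..2 = memory forms */
    *pReg = (bRm >> 3) & 7; /* register or /digit opcode extension       */
    *pRm  =  bRm       & 7; /* base register / addressing form           */
}
#endif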
85
86
87/** Opcode 0x0f 0x00 /1. */
88FNIEMOPRM_DEF(iemOp_Grp6_str)
89{
90 IEMOP_MNEMONIC(str, "str Rv/Mw");
91 IEMOP_HLP_MIN_286();
92 IEMOP_HLP_NO_REAL_OR_V86_MODE();
93
94 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
95 {
96 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
97 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
98 switch (pVCpu->iem.s.enmEffOpSize)
99 {
100 case IEMMODE_16BIT:
101 IEM_MC_BEGIN(0, 1);
102 IEM_MC_LOCAL(uint16_t, u16Tr);
103 IEM_MC_FETCH_TR_U16(u16Tr);
104 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
105 IEM_MC_ADVANCE_RIP();
106 IEM_MC_END();
107 break;
108
109 case IEMMODE_32BIT:
110 IEM_MC_BEGIN(0, 1);
111 IEM_MC_LOCAL(uint32_t, u32Tr);
112 IEM_MC_FETCH_TR_U32(u32Tr);
113 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
114 IEM_MC_ADVANCE_RIP();
115 IEM_MC_END();
116 break;
117
118 case IEMMODE_64BIT:
119 IEM_MC_BEGIN(0, 1);
120 IEM_MC_LOCAL(uint64_t, u64Tr);
121 IEM_MC_FETCH_TR_U64(u64Tr);
122 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
123 IEM_MC_ADVANCE_RIP();
124 IEM_MC_END();
125 break;
126
127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
128 }
129 }
130 else
131 {
132 IEM_MC_BEGIN(0, 2);
133 IEM_MC_LOCAL(uint16_t, u16Tr);
134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
136 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
137 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
138 IEM_MC_FETCH_TR_U16(u16Tr);
139 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
140 IEM_MC_ADVANCE_RIP();
141 IEM_MC_END();
142 }
143 return VINF_SUCCESS;
144}
145
146
147/** Opcode 0x0f 0x00 /2. */
148FNIEMOPRM_DEF(iemOp_Grp6_lldt)
149{
150 IEMOP_MNEMONIC(lldt, "lldt Ew");
151 IEMOP_HLP_MIN_286();
152 IEMOP_HLP_NO_REAL_OR_V86_MODE();
153
154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
155 {
156 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
157 IEM_MC_BEGIN(1, 0);
158 IEM_MC_ARG(uint16_t, u16Sel, 0);
159 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
160 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
161 IEM_MC_END();
162 }
163 else
164 {
165 IEM_MC_BEGIN(1, 1);
166 IEM_MC_ARG(uint16_t, u16Sel, 0);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
169 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
170 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
171 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
172 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
173 IEM_MC_END();
174 }
175 return VINF_SUCCESS;
176}
177
178
179/** Opcode 0x0f 0x00 /3. */
180FNIEMOPRM_DEF(iemOp_Grp6_ltr)
181{
182 IEMOP_MNEMONIC(ltr, "ltr Ew");
183 IEMOP_HLP_MIN_286();
184 IEMOP_HLP_NO_REAL_OR_V86_MODE();
185
186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
187 {
188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
189 IEM_MC_BEGIN(1, 0);
190 IEM_MC_ARG(uint16_t, u16Sel, 0);
191 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
192 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
193 IEM_MC_END();
194 }
195 else
196 {
197 IEM_MC_BEGIN(1, 1);
198 IEM_MC_ARG(uint16_t, u16Sel, 0);
199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
202 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
203 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
204 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
205 IEM_MC_END();
206 }
207 return VINF_SUCCESS;
208}
209
210
211/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
212FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
213{
214 IEMOP_HLP_MIN_286();
215 IEMOP_HLP_NO_REAL_OR_V86_MODE();
216
217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
218 {
219 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
220 IEM_MC_BEGIN(2, 0);
221 IEM_MC_ARG(uint16_t, u16Sel, 0);
222 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
223 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
224 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
225 IEM_MC_END();
226 }
227 else
228 {
229 IEM_MC_BEGIN(2, 1);
230 IEM_MC_ARG(uint16_t, u16Sel, 0);
231 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
234 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
235 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
236 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
237 IEM_MC_END();
238 }
239 return VINF_SUCCESS;
240}
241
242
243/** Opcode 0x0f 0x00 /4. */
244FNIEMOPRM_DEF(iemOp_Grp6_verr)
245{
246 IEMOP_MNEMONIC(verr, "verr Ew");
247 IEMOP_HLP_MIN_286();
248 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
249}
250
251
252/** Opcode 0x0f 0x00 /5. */
253FNIEMOPRM_DEF(iemOp_Grp6_verw)
254{
255 IEMOP_MNEMONIC(verw, "verw Ew");
256 IEMOP_HLP_MIN_286();
257 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
258}
259
260
261/**
262 * Group 6 jump table.
263 */
264IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
265{
266 iemOp_Grp6_sldt,
267 iemOp_Grp6_str,
268 iemOp_Grp6_lldt,
269 iemOp_Grp6_ltr,
270 iemOp_Grp6_verr,
271 iemOp_Grp6_verw,
272 iemOp_InvalidWithRM,
273 iemOp_InvalidWithRM
274};
275
276/** Opcode 0x0f 0x00. */
277FNIEMOP_DEF(iemOp_Grp6)
278{
279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
280 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
281}
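/*
 * Sketch (hypothetical helper, not in the file): g_apfnGroup6 is indexed by
 * the /digit, i.e. ModR/M bits 5:3, so sldt is 0F 00 /0, str /1, lldt /2,
 * ltr /3, verr /4 and verw /5, with /6 and /7 undefined.
 */
#if 0
static unsigned Grp6Index(uint8_t bRm) { return (bRm >> 3) & 7; }
#endif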
282
283
284/** Opcode 0x0f 0x01 /0. */
285FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
286{
287 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
288 IEMOP_HLP_MIN_286();
289 IEMOP_HLP_64BIT_OP_SIZE();
290 IEM_MC_BEGIN(2, 1);
291 IEM_MC_ARG(uint8_t, iEffSeg, 0);
292 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
295 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
296 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
297 IEM_MC_END();
298 return VINF_SUCCESS;
299}
300
301
302/** Opcode 0x0f 0x01 /0. */
303FNIEMOP_DEF(iemOp_Grp7_vmcall)
304{
305 IEMOP_BITCH_ABOUT_STUB();
306 return IEMOP_RAISE_INVALID_OPCODE();
307}
308
309
310/** Opcode 0x0f 0x01 /0. */
311FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
312{
313 IEMOP_BITCH_ABOUT_STUB();
314 return IEMOP_RAISE_INVALID_OPCODE();
315}
316
317
318/** Opcode 0x0f 0x01 /0. */
319FNIEMOP_DEF(iemOp_Grp7_vmresume)
320{
321 IEMOP_BITCH_ABOUT_STUB();
322 return IEMOP_RAISE_INVALID_OPCODE();
323}
324
325
326/** Opcode 0x0f 0x01 /0. */
327FNIEMOP_DEF(iemOp_Grp7_vmxoff)
328{
329 IEMOP_BITCH_ABOUT_STUB();
330 return IEMOP_RAISE_INVALID_OPCODE();
331}
332
333
334/** Opcode 0x0f 0x01 /1. */
335FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
336{
337 IEMOP_MNEMONIC(sidt, "sidt Ms");
338 IEMOP_HLP_MIN_286();
339 IEMOP_HLP_64BIT_OP_SIZE();
340 IEM_MC_BEGIN(2, 1);
341 IEM_MC_ARG(uint8_t, iEffSeg, 0);
342 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
345 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
346 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
347 IEM_MC_END();
348 return VINF_SUCCESS;
349}
350
351
352/** Opcode 0x0f 0x01 /1. */
353FNIEMOP_DEF(iemOp_Grp7_monitor)
354{
355 IEMOP_MNEMONIC(monitor, "monitor");
356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
357 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
358}
359
360
361/** Opcode 0x0f 0x01 /1. */
362FNIEMOP_DEF(iemOp_Grp7_mwait)
363{
364 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
366 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
367}
368
369
370/** Opcode 0x0f 0x01 /2. */
371FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
372{
373 IEMOP_MNEMONIC(lgdt, "lgdt");
374 IEMOP_HLP_64BIT_OP_SIZE();
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint8_t, iEffSeg, 0);
377 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
378 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
381 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
382 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
383 IEM_MC_END();
384 return VINF_SUCCESS;
385}
386
387
388/** Opcode 0x0f 0x01 0xd0. */
389FNIEMOP_DEF(iemOp_Grp7_xgetbv)
390{
391 IEMOP_MNEMONIC(xgetbv, "xgetbv");
392 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
393 {
394 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
395 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
396 }
397 return IEMOP_RAISE_INVALID_OPCODE();
398}
399
400
401/** Opcode 0x0f 0x01 0xd1. */
402FNIEMOP_DEF(iemOp_Grp7_xsetbv)
403{
404 IEMOP_MNEMONIC(xsetbv, "xsetbv");
405 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
406 {
407 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
408 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
409 }
410 return IEMOP_RAISE_INVALID_OPCODE();
411}
412
413
414/** Opcode 0x0f 0x01 /3. */
415FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
416{
417 IEMOP_MNEMONIC(lidt, "lidt");
418 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
419 ? IEMMODE_64BIT
420 : pVCpu->iem.s.enmEffOpSize;
421 IEM_MC_BEGIN(3, 1);
422 IEM_MC_ARG(uint8_t, iEffSeg, 0);
423 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
424 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
427 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
428 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
429 IEM_MC_END();
430 return VINF_SUCCESS;
431}
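/*
 * Note (added for clarity): in 64-bit mode lgdt/lidt always operate on a
 * 2+8 byte pseudo-descriptor regardless of operand-size prefixes, which is
 * why the effective operand size is forced to IEMMODE_64BIT above (lgdt does
 * the same via IEMOP_HLP_64BIT_OP_SIZE).
 */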
432
433
434#ifdef VBOX_WITH_NESTED_HWVIRT
435/** Opcode 0x0f 0x01 0xd8. */
436FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
437{
438 IEMOP_MNEMONIC(vmrun, "vmrun");
439 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
440}
441
442/** Opcode 0x0f 0x01 0xd9. */
443FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
444{
445 IEMOP_MNEMONIC(vmmcall, "vmmcall");
446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
447}
448
449
450/** Opcode 0x0f 0x01 0xda. */
451FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
452{
453 IEMOP_MNEMONIC(vmload, "vmload");
454 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
455}
456
457
458/** Opcode 0x0f 0x01 0xdb. */
459FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
460{
461 IEMOP_MNEMONIC(vmsave, "vmsave");
462 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
463}
464
465
466/** Opcode 0x0f 0x01 0xdc. */
467FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
468{
469 IEMOP_MNEMONIC(stgi, "stgi");
470 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
471}
472
473
474/** Opcode 0x0f 0x01 0xdd. */
475FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
476{
477 IEMOP_MNEMONIC(clgi, "clgi");
478 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
479}
480
481
482/** Opcode 0x0f 0x01 0xdf. */
483FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
484{
485 IEMOP_MNEMONIC(invlpga, "invlpga");
486 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
487}
488
489
490/** Opcode 0x0f 0x01 0xde. */
491FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
492{
493 IEMOP_MNEMONIC(skinit, "skinit");
494 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
495}
496#else
497/** Opcode 0x0f 0x01 0xd8. */
498FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
499
500/** Opcode 0x0f 0x01 0xd9. */
501FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
502
503/** Opcode 0x0f 0x01 0xda. */
504FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
505
506/** Opcode 0x0f 0x01 0xdb. */
507FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
508
509/** Opcode 0x0f 0x01 0xdc. */
510FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
511
512/** Opcode 0x0f 0x01 0xdd. */
513FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
514
515/** Opcode 0x0f 0x01 0xdf. */
516FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
517
518/** Opcode 0x0f 0x01 0xde. */
519FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
520#endif /* VBOX_WITH_NESTED_HWVIRT */
521
522/** Opcode 0x0f 0x01 /4. */
523FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
524{
525 IEMOP_MNEMONIC(smsw, "smsw");
526 IEMOP_HLP_MIN_286();
527 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
528 {
529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
530 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
531 switch (pVCpu->iem.s.enmEffOpSize)
532 {
533 case IEMMODE_16BIT:
534 IEM_MC_BEGIN(0, 1);
535 IEM_MC_LOCAL(uint16_t, u16Tmp);
536 IEM_MC_FETCH_CR0_U16(u16Tmp);
537 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
538 { /* likely */ }
539 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
541 else
542 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
543 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
544 IEM_MC_ADVANCE_RIP();
545 IEM_MC_END();
546 return VINF_SUCCESS;
547
548 case IEMMODE_32BIT:
549 IEM_MC_BEGIN(0, 1);
550 IEM_MC_LOCAL(uint32_t, u32Tmp);
551 IEM_MC_FETCH_CR0_U32(u32Tmp);
552 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 return VINF_SUCCESS;
556
557 case IEMMODE_64BIT:
558 IEM_MC_BEGIN(0, 1);
559 IEM_MC_LOCAL(uint64_t, u64Tmp);
560 IEM_MC_FETCH_CR0_U64(u64Tmp);
561 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
562 IEM_MC_ADVANCE_RIP();
563 IEM_MC_END();
564 return VINF_SUCCESS;
565
566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
567 }
568 }
569 else
570 {
571 /* Ignore operand size here, memory refs are always 16-bit. */
572 IEM_MC_BEGIN(0, 2);
573 IEM_MC_LOCAL(uint16_t, u16Tmp);
574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
577 IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
578 IEM_MC_FETCH_CR0_U16(u16Tmp);
579 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
580 { /* likely */ }
581 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
582 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
583 else
584 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
585 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
586 IEM_MC_ADVANCE_RIP();
587 IEM_MC_END();
588 return VINF_SUCCESS;
589 }
590}
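/*
 * Note on the OR masks above (added for clarity): a 286 only implements MSW
 * bits 0-3 (PE/MP/EM/TS) and returns the remaining bits set (0xfff0); a 386
 * also implements ET (bit 4), hence 0xffe0; 486 and later store the low word
 * of CR0 unmodified.
 */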
591
592
593/** Opcode 0x0f 0x01 /6. */
594FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
595{
596 /* The operand size is effectively ignored, all is 16-bit and only the
597 lower 4 bits (PE/MP/EM/TS) are used. */
598 IEMOP_MNEMONIC(lmsw, "lmsw");
599 IEMOP_HLP_MIN_286();
600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
601 {
602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
603 IEM_MC_BEGIN(1, 0);
604 IEM_MC_ARG(uint16_t, u16Tmp, 0);
605 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
606 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
607 IEM_MC_END();
608 }
609 else
610 {
611 IEM_MC_BEGIN(1, 1);
612 IEM_MC_ARG(uint16_t, u16Tmp, 0);
613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
616 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
617 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
618 IEM_MC_END();
619 }
620 return VINF_SUCCESS;
621}
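/*
 * Note (added for clarity): lmsw can only load the low four MSW bits
 * (PE/MP/EM/TS) into CR0, and it can set PE but never clear it; that
 * enforcement presumably lives in iemCImpl_lmsw, which is not part of
 * this file.
 */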
622
623
624/** Opcode 0x0f 0x01 /7. */
625FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
626{
627 IEMOP_MNEMONIC(invlpg, "invlpg");
628 IEMOP_HLP_MIN_486();
629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
630 IEM_MC_BEGIN(1, 1);
631 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
633 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
634 IEM_MC_END();
635 return VINF_SUCCESS;
636}
637
638
639/** Opcode 0x0f 0x01 /7. */
640FNIEMOP_DEF(iemOp_Grp7_swapgs)
641{
642 IEMOP_MNEMONIC(swapgs, "swapgs");
643 IEMOP_HLP_ONLY_64BIT();
644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
646}
647
648
649/** Opcode 0x0f 0x01 /7. */
650FNIEMOP_DEF(iemOp_Grp7_rdtscp)
651{
652 IEMOP_MNEMONIC(rdtscp, "rdtscp");
653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
654 /** @todo SVM intercept removal from here. */
655 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
656 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
657}
658
659
660/**
661 * Group 7 jump table, memory variant.
662 */
663IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
664{
665 iemOp_Grp7_sgdt,
666 iemOp_Grp7_sidt,
667 iemOp_Grp7_lgdt,
668 iemOp_Grp7_lidt,
669 iemOp_Grp7_smsw,
670 iemOp_InvalidWithRM,
671 iemOp_Grp7_lmsw,
672 iemOp_Grp7_invlpg
673};
674
675
676/** Opcode 0x0f 0x01. */
677FNIEMOP_DEF(iemOp_Grp7)
678{
679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
680 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
681 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
682
683 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
684 {
685 case 0:
686 switch (bRm & X86_MODRM_RM_MASK)
687 {
688 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
689 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
690 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
691 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
692 }
693 return IEMOP_RAISE_INVALID_OPCODE();
694
695 case 1:
696 switch (bRm & X86_MODRM_RM_MASK)
697 {
698 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
699 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
700 }
701 return IEMOP_RAISE_INVALID_OPCODE();
702
703 case 2:
704 switch (bRm & X86_MODRM_RM_MASK)
705 {
706 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
707 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
708 }
709 return IEMOP_RAISE_INVALID_OPCODE();
710
711 case 3:
712 switch (bRm & X86_MODRM_RM_MASK)
713 {
714 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
715 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
716 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
717 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
718 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
719 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
720 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
721 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
723 }
724
725 case 4:
726 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
727
728 case 5:
729 return IEMOP_RAISE_INVALID_OPCODE();
730
731 case 6:
732 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
733
734 case 7:
735 switch (bRm & X86_MODRM_RM_MASK)
736 {
737 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
738 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
739 }
740 return IEMOP_RAISE_INVALID_OPCODE();
741
742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
743 }
744}
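/*
 * Sketch (added for clarity, hypothetical helper): for the mod==3 rows above
 * the complete ModR/M byte is C0h + /digit*8 + rm, which is how e.g. xgetbv
 * (/2, rm=0) gets its documented 0F 01 D0 encoding and vmcall (/0, rm=1) its
 * 0F 01 C1 encoding.
 */
#if 0
static uint8_t Grp7RegFormModRm(uint8_t iDigit, uint8_t iRm)
{
    return (uint8_t)(0xc0 | (iDigit << 3) | iRm);
}
#endif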
745
746/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
747FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
748{
749 IEMOP_HLP_NO_REAL_OR_V86_MODE();
750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
751
752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
753 {
754 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
755 switch (pVCpu->iem.s.enmEffOpSize)
756 {
757 case IEMMODE_16BIT:
758 {
759 IEM_MC_BEGIN(3, 0);
760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
761 IEM_MC_ARG(uint16_t, u16Sel, 1);
762 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
763
764 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
765 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
766 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
767
768 IEM_MC_END();
769 return VINF_SUCCESS;
770 }
771
772 case IEMMODE_32BIT:
773 case IEMMODE_64BIT:
774 {
775 IEM_MC_BEGIN(3, 0);
776 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
777 IEM_MC_ARG(uint16_t, u16Sel, 1);
778 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
779
780 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
781 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
782 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
783
784 IEM_MC_END();
785 return VINF_SUCCESS;
786 }
787
788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
789 }
790 }
791 else
792 {
793 switch (pVCpu->iem.s.enmEffOpSize)
794 {
795 case IEMMODE_16BIT:
796 {
797 IEM_MC_BEGIN(3, 1);
798 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
799 IEM_MC_ARG(uint16_t, u16Sel, 1);
800 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
802
803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
804 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
805
806 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
807 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
808 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
809
810 IEM_MC_END();
811 return VINF_SUCCESS;
812 }
813
814 case IEMMODE_32BIT:
815 case IEMMODE_64BIT:
816 {
817 IEM_MC_BEGIN(3, 1);
818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
819 IEM_MC_ARG(uint16_t, u16Sel, 1);
820 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
822
823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
824 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
825/** @todo testcase: make sure it's a 16-bit read. */
826
827 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
828 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
829 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
830
831 IEM_MC_END();
832 return VINF_SUCCESS;
833 }
834
835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
836 }
837 }
838}
839
840
841
842/** Opcode 0x0f 0x02. */
843FNIEMOP_DEF(iemOp_lar_Gv_Ew)
844{
845 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
846 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
847}
848
849
850/** Opcode 0x0f 0x03. */
851FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
852{
853 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
854 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
855}
856
857
858/** Opcode 0x0f 0x05. */
859FNIEMOP_DEF(iemOp_syscall)
860{
861 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
863 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
864}
865
866
867/** Opcode 0x0f 0x06. */
868FNIEMOP_DEF(iemOp_clts)
869{
870 IEMOP_MNEMONIC(clts, "clts");
871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
872 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
873}
874
875
876/** Opcode 0x0f 0x07. */
877FNIEMOP_DEF(iemOp_sysret)
878{
879 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
881 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
882}
883
884
885/** Opcode 0x0f 0x08. */
886FNIEMOP_DEF(iemOp_invd)
887{
888 IEMOP_MNEMONIC(invd, "invd");
889#ifdef VBOX_WITH_NESTED_HWVIRT
890 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
891 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
892#else
893 RT_NOREF_PV(pVCpu);
894#endif
895 /** @todo implement invd for the regular case (above only handles nested SVM
896 * exits). */
897 IEMOP_BITCH_ABOUT_STUB();
898 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
899}
900
901// IEMOP_HLP_MIN_486();
902
903
904/** Opcode 0x0f 0x09. */
905FNIEMOP_DEF(iemOp_wbinvd)
906{
907 IEMOP_MNEMONIC(wbinvd, "wbinvd");
908 IEMOP_HLP_MIN_486();
909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
910 IEM_MC_BEGIN(0, 0);
911 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
912 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
913 IEM_MC_ADVANCE_RIP();
914 IEM_MC_END();
915 return VINF_SUCCESS; /* ignore for now */
916}
917
918
919/** Opcode 0x0f 0x0b. */
920FNIEMOP_DEF(iemOp_ud2)
921{
922 IEMOP_MNEMONIC(ud2, "ud2");
923 return IEMOP_RAISE_INVALID_OPCODE();
924}
925
926/** Opcode 0x0f 0x0d. */
927FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
928{
929 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
931 {
932 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
933 return IEMOP_RAISE_INVALID_OPCODE();
934 }
935
936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
938 {
939 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
940 return IEMOP_RAISE_INVALID_OPCODE();
941 }
942
943 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
944 {
945 case 2: /* Aliased to /0 for the time being. */
946 case 4: /* Aliased to /0 for the time being. */
947 case 5: /* Aliased to /0 for the time being. */
948 case 6: /* Aliased to /0 for the time being. */
949 case 7: /* Aliased to /0 for the time being. */
950 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
951 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
952 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
954 }
955
956 IEM_MC_BEGIN(0, 1);
957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
960 /* Currently a NOP. */
961 NOREF(GCPtrEffSrc);
962 IEM_MC_ADVANCE_RIP();
963 IEM_MC_END();
964 return VINF_SUCCESS;
965}
966
967
968/** Opcode 0x0f 0x0e. */
969FNIEMOP_STUB(iemOp_femms);
970
971
972/** Opcode 0x0f 0x0f. */
973FNIEMOP_DEF(iemOp_3Dnow)
974{
975 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
976 {
977 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
978 return IEMOP_RAISE_INVALID_OPCODE();
979 }
980
981#ifdef IEM_WITH_3DNOW
982 /* This is pretty sparse, use switch instead of table. */
983 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
984 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
985#else
986 IEMOP_BITCH_ABOUT_STUB();
987 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
988#endif
989}
990
991
992/**
993 * @opcode 0x10
994 * @oppfx none
995 * @opcpuid sse
996 * @opgroup og_sse_simdfp_datamove
997 * @opxcpttype 4UA
998 * @optest op1=1 op2=2 -> op1=2
999 * @optest op1=0 op2=-22 -> op1=-22
1000 */
1001FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1002{
1003 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1004 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1006 {
1007 /*
1008 * Register, register.
1009 */
1010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1011 IEM_MC_BEGIN(0, 0);
1012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1013 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1014 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1015 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1016 IEM_MC_ADVANCE_RIP();
1017 IEM_MC_END();
1018 }
1019 else
1020 {
1021 /*
1022 * Memory, register.
1023 */
1024 IEM_MC_BEGIN(0, 2);
1025 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1027
1028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1030 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1031 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1032
1033 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1034 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1035
1036 IEM_MC_ADVANCE_RIP();
1037 IEM_MC_END();
1038 }
1039 return VINF_SUCCESS;
1040
1041}
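/*
 * Sketch (hypothetical helper, not in the file): the register indices above
 * combine the 3-bit ModR/M fields with the REX.R/REX.B extensions, which IEM
 * keeps pre-shifted to bit 3 in uRexReg/uRexB, yielding xmm0-xmm15 in 64-bit
 * mode.
 */
#if 0
static unsigned XregFromModRm(uint8_t bRm, unsigned uRex /* 0 or 8 */, bool fRegField)
{
    return (fRegField ? (bRm >> 3) & 7 : bRm & 7) | uRex;
}
#endif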
1042
1043
1044/**
1045 * @opcode 0x10
1046 * @oppfx 0x66
1047 * @opcpuid sse2
1048 * @opgroup og_sse2_pcksclr_datamove
1049 * @opxcpttype 4UA
1050 * @optest op1=1 op2=2 -> op1=2
1051 * @optest op1=0 op2=-42 -> op1=-42
1052 */
1053FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1054{
1055 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1058 {
1059 /*
1060 * Register, register.
1061 */
1062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1063 IEM_MC_BEGIN(0, 0);
1064 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1066 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1067 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1068 IEM_MC_ADVANCE_RIP();
1069 IEM_MC_END();
1070 }
1071 else
1072 {
1073 /*
1074 * Memory, register.
1075 */
1076 IEM_MC_BEGIN(0, 2);
1077 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1079
1080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1082 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1083 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1084
1085 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1086 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1087
1088 IEM_MC_ADVANCE_RIP();
1089 IEM_MC_END();
1090 }
1091 return VINF_SUCCESS;
1092}
1093
1094
1095/**
1096 * @opcode 0x10
1097 * @oppfx 0xf3
1098 * @opcpuid sse
1099 * @opgroup og_sse_simdfp_datamove
1100 * @opxcpttype 5
1101 * @optest op1=1 op2=2 -> op1=2
1102 * @optest op1=0 op2=-22 -> op1=-22
1103 */
1104FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1105{
1106 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1109 {
1110 /*
1111 * Register, register.
1112 */
1113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1114 IEM_MC_BEGIN(0, 1);
1115 IEM_MC_LOCAL(uint32_t, uSrc);
1116
1117 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1118 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1119 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1120 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1121
1122 IEM_MC_ADVANCE_RIP();
1123 IEM_MC_END();
1124 }
1125 else
1126 {
1127 /*
1128 * Memory, register.
1129 */
1130 IEM_MC_BEGIN(0, 2);
1131 IEM_MC_LOCAL(uint32_t, uSrc);
1132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1133
1134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1136 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1137 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1138
1139 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1140 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1141
1142 IEM_MC_ADVANCE_RIP();
1143 IEM_MC_END();
1144 }
1145 return VINF_SUCCESS;
1146}
1147
1148
1149/**
1150 * @opcode 0x10
1151 * @oppfx 0xf2
1152 * @opcpuid sse2
1153 * @opgroup og_sse2_pcksclr_datamove
1154 * @opxcpttype 5
1155 * @optest op1=1 op2=2 -> op1=2
1156 * @optest op1=0 op2=-42 -> op1=-42
1157 */
1158FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
1159{
1160 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1162 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1163 {
1164 /*
1165 * Register, register.
1166 */
1167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1168 IEM_MC_BEGIN(0, 1);
1169 IEM_MC_LOCAL(uint64_t, uSrc);
1170
1171 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1173 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1174 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1175
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 }
1179 else
1180 {
1181 /*
1182 * Memory, register.
1183 */
1184 IEM_MC_BEGIN(0, 2);
1185 IEM_MC_LOCAL(uint64_t, uSrc);
1186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1187
1188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1190 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1192
1193 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1194 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1195
1196 IEM_MC_ADVANCE_RIP();
1197 IEM_MC_END();
1198 }
1199 return VINF_SUCCESS;
1200}
1201
1202
1203/**
1204 * @opcode 0x11
1205 * @oppfx none
1206 * @opcpuid sse
1207 * @opgroup og_sse_simdfp_datamove
1208 * @opxcpttype 4UA
1209 * @optest op1=1 op2=2 -> op1=2
1210 * @optest op1=0 op2=-42 -> op1=-42
1211 */
1212FNIEMOP_DEF(iemOp_movups_Wps_Vps)
1213{
1214 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1217 {
1218 /*
1219 * Register, register.
1220 */
1221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1222 IEM_MC_BEGIN(0, 0);
1223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1225 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1226 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1227 IEM_MC_ADVANCE_RIP();
1228 IEM_MC_END();
1229 }
1230 else
1231 {
1232 /*
1233 * Memory, register.
1234 */
1235 IEM_MC_BEGIN(0, 2);
1236 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1238
1239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1241 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1243
1244 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1245 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1246
1247 IEM_MC_ADVANCE_RIP();
1248 IEM_MC_END();
1249 }
1250 return VINF_SUCCESS;
1251}
1252
1253
1254/**
1255 * @opcode 0x11
1256 * @oppfx 0x66
1257 * @opcpuid sse2
1258 * @opgroup og_sse2_pcksclr_datamove
1259 * @opxcpttype 4UA
1260 * @optest op1=1 op2=2 -> op1=2
1261 * @optest op1=0 op2=-42 -> op1=-42
1262 */
1263FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
1264{
1265 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1268 {
1269 /*
1270 * Register, register.
1271 */
1272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1273 IEM_MC_BEGIN(0, 0);
1274 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1275 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1276 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1277 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1278 IEM_MC_ADVANCE_RIP();
1279 IEM_MC_END();
1280 }
1281 else
1282 {
1283 /*
1284 * Memory, register.
1285 */
1286 IEM_MC_BEGIN(0, 2);
1287 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1289
1290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1292 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1294
1295 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1296 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1297
1298 IEM_MC_ADVANCE_RIP();
1299 IEM_MC_END();
1300 }
1301 return VINF_SUCCESS;
1302}
1303
1304
1305/**
1306 * @opcode 0x11
1307 * @oppfx 0xf3
1308 * @opcpuid sse
1309 * @opgroup og_sse_simdfp_datamove
1310 * @opxcpttype 5
1311 * @optest op1=1 op2=2 -> op1=2
1312 * @optest op1=0 op2=-22 -> op1=-22
1313 */
1314FNIEMOP_DEF(iemOp_movss_Wss_Vss)
1315{
1316 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1319 {
1320 /*
1321 * Register, register.
1322 */
1323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1324 IEM_MC_BEGIN(0, 1);
1325 IEM_MC_LOCAL(uint32_t, uSrc);
1326
1327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1329 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1330 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1331
1332 IEM_MC_ADVANCE_RIP();
1333 IEM_MC_END();
1334 }
1335 else
1336 {
1337 /*
1338 * Memory, register.
1339 */
1340 IEM_MC_BEGIN(0, 2);
1341 IEM_MC_LOCAL(uint32_t, uSrc);
1342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1343
1344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1346 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1348
1349 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1350 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1351
1352 IEM_MC_ADVANCE_RIP();
1353 IEM_MC_END();
1354 }
1355 return VINF_SUCCESS;
1356}
1357
1358
1359/**
1360 * @opcode 0x11
1361 * @oppfx 0xf2
1362 * @opcpuid sse2
1363 * @opgroup og_sse2_pcksclr_datamove
1364 * @opxcpttype 5
1365 * @optest op1=1 op2=2 -> op1=2
1366 * @optest op1=0 op2=-42 -> op1=-42
1367 */
1368FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
1369{
1370 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1373 {
1374 /*
1375 * Register, register.
1376 */
1377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1378 IEM_MC_BEGIN(0, 1);
1379 IEM_MC_LOCAL(uint64_t, uSrc);
1380
1381 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1383 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1384 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1385
1386 IEM_MC_ADVANCE_RIP();
1387 IEM_MC_END();
1388 }
1389 else
1390 {
1391 /*
1392 * Memory, register.
1393 */
1394 IEM_MC_BEGIN(0, 2);
1395 IEM_MC_LOCAL(uint64_t, uSrc);
1396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1397
1398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1400 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1402
1403 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1404 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1405
1406 IEM_MC_ADVANCE_RIP();
1407 IEM_MC_END();
1408 }
1409 return VINF_SUCCESS;
1410}
1411
1412
1413FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
1414{
1415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1417 {
1418 /**
1419 * @opcode 0x12
1420 * @opcodesub 11 mr/reg
1421 * @oppfx none
1422 * @opcpuid sse
1423 * @opgroup og_sse_simdfp_datamove
1424 * @opxcpttype 5
1425 * @optest op1=1 op2=2 -> op1=2
1426 * @optest op1=0 op2=-42 -> op1=-42
1427 */
1428 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1429
1430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1431 IEM_MC_BEGIN(0, 1);
1432 IEM_MC_LOCAL(uint64_t, uSrc);
1433
1434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1436 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1437 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1438
1439 IEM_MC_ADVANCE_RIP();
1440 IEM_MC_END();
1441 }
1442 else
1443 {
1444 /**
1445 * @opdone
1446 * @opcode 0x12
1447 * @opcodesub !11 mr/reg
1448 * @oppfx none
1449 * @opcpuid sse
1450 * @opgroup og_sse_simdfp_datamove
1451 * @opxcpttype 5
1452 * @optest op1=1 op2=2 -> op1=2
1453 * @optest op1=0 op2=-42 -> op1=-42
1454 * @opfunction iemOp_movlps_Vq_Mq__movhlps
1455 */
1456 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1457
1458 IEM_MC_BEGIN(0, 2);
1459 IEM_MC_LOCAL(uint64_t, uSrc);
1460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1461
1462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1465 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1466
1467 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1468 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1469
1470 IEM_MC_ADVANCE_RIP();
1471 IEM_MC_END();
1472 }
1473 return VINF_SUCCESS;
1474}
1475
1476
1477/**
1478 * @opcode 0x12
1479 * @opcodesub !11 mr/reg
1480 * @oppfx 0x66
1481 * @opcpuid sse2
1482 * @opgroup og_sse2_pcksclr_datamove
1483 * @opxcpttype 5
1484 * @optest op1=1 op2=2 -> op1=2
1485 * @optest op1=0 op2=-42 -> op1=-42
1486 */
1487FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
1488{
1489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1490 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1491 {
1492 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1493
1494 IEM_MC_BEGIN(0, 2);
1495 IEM_MC_LOCAL(uint64_t, uSrc);
1496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1497
1498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1500 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1502
1503 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1504 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1505
1506 IEM_MC_ADVANCE_RIP();
1507 IEM_MC_END();
1508 return VINF_SUCCESS;
1509 }
1510
1511 /**
1512 * @opdone
1513 * @opmnemonic ud660f12m3
1514 * @opcode 0x12
1515 * @opcodesub 11 mr/reg
1516 * @oppfx 0x66
1517 * @opunused immediate
1518 * @opcpuid sse
1519 * @optest ->
1520 */
1521 return IEMOP_RAISE_INVALID_OPCODE();
1522}
1523
1524
1525/**
1526 * @opcode 0x12
1527 * @oppfx 0xf3
1528 * @opcpuid sse3
1529 * @opgroup og_sse3_pcksclr_datamove
1530 * @opxcpttype 4
1531 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
1532 * op1=0x00000002000000020000000100000001
1533 */
1534FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
1535{
1536 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1538 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1539 {
1540 /*
1541 * Register, register.
1542 */
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEM_MC_BEGIN(2, 0);
1545 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1546 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1547
1548 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1549 IEM_MC_PREPARE_SSE_USAGE();
1550
1551 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1552 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1554
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 else
1559 {
1560 /*
1561 * Register, memory.
1562 */
1563 IEM_MC_BEGIN(2, 2);
1564 IEM_MC_LOCAL(RTUINT128U, uSrc);
1565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1566 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1567 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1571 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1572 IEM_MC_PREPARE_SSE_USAGE();
1573
1574 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1575 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1576 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1577
1578 IEM_MC_ADVANCE_RIP();
1579 IEM_MC_END();
1580 }
1581 return VINF_SUCCESS;
1582}
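/*
 * Reference sketch of the movsldup shuffle exercised by the @optest above
 * (illustration only, not the emulation code): each even source dword is
 * duplicated into a pair.
 */
#if 0
static void MovSlDupRef(uint32_t aDst[4], uint32_t const aSrc[4])
{
    aDst[0] = aSrc[0]; aDst[1] = aSrc[0];
    aDst[2] = aSrc[2]; aDst[3] = aSrc[2];
}
#endif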
1583
1584
1585/**
1586 * @opcode 0x12
1587 * @oppfx 0xf2
1588 * @opcpuid sse3
1589 * @opgroup og_sse3_pcksclr_datamove
1590 * @opxcpttype 5
1591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1592 * op1=0x22222222111111112222222211111111
1593 */
1594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1595{
1596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1599 {
1600 /*
1601 * Register, register.
1602 */
1603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604 IEM_MC_BEGIN(2, 0);
1605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1606 IEM_MC_ARG(uint64_t, uSrc, 1);
1607
1608 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1609 IEM_MC_PREPARE_SSE_USAGE();
1610
1611 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1612 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1613 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 else
1619 {
1620 /*
1621 * Register, memory.
1622 */
1623 IEM_MC_BEGIN(2, 2);
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1625 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1626 IEM_MC_ARG(uint64_t, uSrc, 1);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1631 IEM_MC_PREPARE_SSE_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1635 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1636
1637 IEM_MC_ADVANCE_RIP();
1638 IEM_MC_END();
1639 }
1640 return VINF_SUCCESS;
1641}
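/*
 * Reference sketch (illustration only): movddup broadcasts the low qword of
 * the source into both halves of the destination.
 */
#if 0
static void MovDDupRef(uint64_t aDst[2], uint64_t uSrcLo)
{
    aDst[0] = uSrcLo; aDst[1] = uSrcLo;
}
#endif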
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
1875
1876
1877/**
1878 * @opcode 0x16
1879 * @opcodesub !11 mr/reg
1880 * @oppfx 0x66
1881 * @opcpuid sse2
1882 * @opgroup og_sse2_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=1 op2=2 -> op1=2
1885 * @optest op1=0 op2=-42 -> op1=-42
1886 */
1887FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1888{
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1891 {
1892 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1893 IEM_MC_BEGIN(0, 2);
1894 IEM_MC_LOCAL(uint64_t, uSrc);
1895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1896
1897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1901
1902 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1904
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 return VINF_SUCCESS;
1908 }
1909
1910 /**
1911 * @opdone
1912 * @opmnemonic ud660f16m3
1913 * @opcode 0x16
1914 * @opcodesub 11 mr/reg
1915 * @oppfx 0x66
1916 * @opunused immediate
1917 * @opcpuid sse
1918 * @optest ->
1919 */
1920 return IEMOP_RAISE_INVALID_OPCODE();
1921}
1922
1923
1924/**
1925 * @opcode 0x16
1926 * @oppfx 0xf3
1927 * @opcpuid sse3
1928 * @opgroup og_sse3_pcksclr_datamove
1929 * @opxcpttype 4
1930 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1931 * op1=0x00000002000000020000000100000001
1932 */
1933FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1934{
1935 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1938 {
1939 /*
1940 * Register, register.
1941 */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_BEGIN(2, 0);
1944 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1945 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1946
1947 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1948 IEM_MC_PREPARE_SSE_USAGE();
1949
1950 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1951 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1952 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1953
1954 IEM_MC_ADVANCE_RIP();
1955 IEM_MC_END();
1956 }
1957 else
1958 {
1959 /*
1960 * Register, memory.
1961 */
1962 IEM_MC_BEGIN(2, 2);
1963 IEM_MC_LOCAL(RTUINT128U, uSrc);
1964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1965 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1966 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1971 IEM_MC_PREPARE_SSE_USAGE();
1972
1973 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1974 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1975 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1976
1977 IEM_MC_ADVANCE_RIP();
1978 IEM_MC_END();
1979 }
1980 return VINF_SUCCESS;
1981}
1982
1983/**
1984 * @opdone
1985 * @opmnemonic udf30f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
1995/**
1996 * @opcode 0x17
1997 * @opcodesub !11 mr/reg
1998 * @oppfx none
1999 * @opcpuid sse
2000 * @opgroup og_sse_simdfp_datamove
2001 * @opxcpttype 5
2002 * @optest op1=1 op2=2 -> op1=2
2003 * @optest op1=0 op2=-42 -> op1=-42
2004 */
2005FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2006{
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2009 {
2010 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2011
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(uint64_t, uSrc);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 return VINF_SUCCESS;
2027 }
2028
2029 /**
2030 * @opdone
2031 * @opmnemonic ud0f17m3
2032 * @opcode 0x17
2033 * @opcodesub 11 mr/reg
2034 * @oppfx none
2035 * @opunused immediate
2036 * @opcpuid sse
2037 * @optest ->
2038 */
2039 return IEMOP_RAISE_INVALID_OPCODE();
2040}
2041
2042
2043/**
2044 * @opcode 0x17
2045 * @opcodesub !11 mr/reg
2046 * @oppfx 0x66
2047 * @opcpuid sse2
2048 * @opgroup og_sse2_pcksclr_datamove
2049 * @opxcpttype 5
2050 * @optest op1=1 op2=2 -> op1=2
2051 * @optest op1=0 op2=-42 -> op1=-42
2052 */
2053FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2054{
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2059
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(uint64_t, uSrc);
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2068
2069 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 return VINF_SUCCESS;
2075 }
2076
2077 /**
2078 * @opdone
2079 * @opmnemonic ud660f17m3
2080 * @opcode 0x17
2081 * @opcodesub 11 mr/reg
2082 * @oppfx 0x66
2083 * @opunused immediate
2084 * @opcpuid sse
2085 * @optest ->
2086 */
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/**
2092 * @opdone
2093 * @opmnemonic udf30f17
2094 * @opcode 0x17
2095 * @oppfx 0xf3
2096 * @opunused intel-modrm
2097 * @opcpuid sse
2098 * @optest ->
2099 * @opdone
2100 */
2101
2102/**
2103 * @opmnemonic udf20f17
2104 * @opcode 0x17
2105 * @oppfx 0xf2
2106 * @opunused intel-modrm
2107 * @opcpuid sse
2108 * @optest ->
2109 * @opdone
2110 */
2111
2112
2113/** Opcode 0x0f 0x18. */
2114FNIEMOP_DEF(iemOp_prefetch_Grp16)
2115{
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2118 {
2119 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2120 {
2121 case 4: /* Aliased to /0 for the time being according to AMD. */
2122 case 5: /* Aliased to /0 for the time being according to AMD. */
2123 case 6: /* Aliased to /0 for the time being according to AMD. */
2124 case 7: /* Aliased to /0 for the time being according to AMD. */
2125 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2126 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2127 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2128 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2130 }
2131
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2136 /* Currently a NOP. */
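 /* (Editor's note: the PREFETCHh family is architecturally just a hint
    that may be ignored, so emulating it as a NOP is a conforming choice.) */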
2137 NOREF(GCPtrEffSrc);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 return VINF_SUCCESS;
2141 }
2142
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
2145
2146
2147/** Opcode 0x0f 0x19..0x1f. */
2148FNIEMOP_DEF(iemOp_nop_Ev)
2149{
2150 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2153 {
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 0);
2156 IEM_MC_ADVANCE_RIP();
2157 IEM_MC_END();
2158 }
2159 else
2160 {
2161 IEM_MC_BEGIN(0, 1);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 /* Currently a NOP. */
2166 NOREF(GCPtrEffSrc);
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171}
2172
2173
2174/** Opcode 0x0f 0x20. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2176{
2177 /* mod is ignored, as are operand-size overrides. */
2178 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2179 IEMOP_HLP_MIN_386();
2180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2181 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2182 else
2183 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2184
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2187 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2188 {
2189 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2190 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2191 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2192 iCrReg |= 8;
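 /* Editor's illustration: on such CPUs a LOCK-prefixed "mov eax, cr0"
    (f0 0f 20 c0) is decoded as "mov eax, cr8", giving 32-bit code a way
    to reach CR8. */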
2193 }
2194 switch (iCrReg)
2195 {
2196 case 0: case 2: case 3: case 4: case 8:
2197 break;
2198 default:
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200 }
2201 IEMOP_HLP_DONE_DECODING();
2202
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2204}
2205
2206
2207/** Opcode 0x0f 0x21. */
2208FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2209{
2210 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2211 IEMOP_HLP_MIN_386();
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2217 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2218 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2219}
2220
2221
2222/** Opcode 0x0f 0x22. */
2223FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2224{
2225 /* mod is ignored, as are operand-size overrides. */
2226 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2227 IEMOP_HLP_MIN_386();
2228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2229 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2230 else
2231 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2232
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2236 {
2237 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2238 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2239 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2240 iCrReg |= 8;
2241 }
2242 switch (iCrReg)
2243 {
2244 case 0: case 2: case 3: case 4: case 8:
2245 break;
2246 default:
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 }
2249 IEMOP_HLP_DONE_DECODING();
2250
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2252}
2253
2254
2255/** Opcode 0x0f 0x23. */
2256FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2257{
2258 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2259 IEMOP_HLP_MIN_386();
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2265 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2266 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2267}
2268
2269
2270/** Opcode 0x0f 0x24. */
2271FNIEMOP_DEF(iemOp_mov_Rd_Td)
2272{
2273 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2274 /** @todo works on 386 and 486. */
2275 /* The RM byte is not considered, see testcase. */
2276 return IEMOP_RAISE_INVALID_OPCODE();
2277}
2278
2279
2280/** Opcode 0x0f 0x26. */
2281FNIEMOP_DEF(iemOp_mov_Td_Rd)
2282{
2283 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2284 /** @todo works on 386 and 486. */
2285 /* The RM byte is not considered, see testcase. */
2286 return IEMOP_RAISE_INVALID_OPCODE();
2287}
2288
2289
2290/**
2291 * @opcode 0x28
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 1
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
2299FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2300{
2301 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2304 {
2305 /*
2306 * Register, register.
2307 */
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 IEM_MC_BEGIN(0, 0);
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2314 IEM_MC_ADVANCE_RIP();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 return VINF_SUCCESS;
2338}
2339
2340/**
2341 * @opcode 0x28
2342 * @oppfx 0x66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 1
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
2349FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2350{
2351 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_BEGIN(0, 0);
2360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2362 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2363 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 else
2368 {
2369 /*
2370 * Register, memory.
2371 */
2372 IEM_MC_BEGIN(0, 2);
2373 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2375
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380
2381 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2382 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2383
2384 IEM_MC_ADVANCE_RIP();
2385 IEM_MC_END();
2386 }
2387 return VINF_SUCCESS;
2388}
2389
2390/* Opcode 0xf3 0x0f 0x28 - invalid */
2391/* Opcode 0xf2 0x0f 0x28 - invalid */
2392
2393/**
2394 * @opcode 0x29
2395 * @oppfx none
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 1
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-42 -> op1=-42
2401 */
2402FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2407 {
2408 /*
2409 * Register, register.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_BEGIN(0, 0);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2415 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2416 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_ADVANCE_RIP();
2418 IEM_MC_END();
2419 }
2420 else
2421 {
2422 /*
2423 * Memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
2443/**
2444 * @opcode 0x29
2445 * @oppfx 0x66
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 1
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
2452FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2453{
2454 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /*
2459 * Register, register.
2460 */
2461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2462 IEM_MC_BEGIN(0, 0);
2463 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2465 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2466 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2467 IEM_MC_ADVANCE_RIP();
2468 IEM_MC_END();
2469 }
2470 else
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 return VINF_SUCCESS;
2491}
2492
2493/* Opcode 0xf3 0x0f 0x29 - invalid */
2494/* Opcode 0xf2 0x0f 0x29 - invalid */
2495
2496
2497/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2498FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2499/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2500FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2501/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2502FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2503/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2504FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2505
2506
2507/**
2508 * @opcode 0x2b
2509 * @opcodesub !11 mr/reg
2510 * @oppfx none
2511 * @opcpuid sse
2512 * @opgroup og_sse1_cachect
2513 * @opxcpttype 1
2514 * @optest op1=1 op2=2 -> op1=2
2515 * @optest op1=0 op2=-42 -> op1=-42
2516 */
2517FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2518{
2519 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2522 {
2523 /*
2524 * memory, register.
2525 */
2526 IEM_MC_BEGIN(0, 2);
2527 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529
2530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2534
2535 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2536 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
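 /* (Editor's note: the non-temporal hint only affects cache behaviour,
    so an ordinary aligned 128-bit store is a correct emulation.) */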
2537
2538 IEM_MC_ADVANCE_RIP();
2539 IEM_MC_END();
2540 }
2541 /* The register, register encoding is invalid. */
2542 else
2543 return IEMOP_RAISE_INVALID_OPCODE();
2544 return VINF_SUCCESS;
2545}
2546
2547/**
2548 * @opcode 0x2b
2549 * @opcodesub !11 mr/reg
2550 * @oppfx 0x66
2551 * @opcpuid sse2
2552 * @opgroup og_sse2_cachect
2553 * @opxcpttype 1
2554 * @optest op1=1 op2=2 -> op1=2
2555 * @optest op1=0 op2=-42 -> op1=-42
2556 */
2557FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2558{
2559 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2561 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2562 {
2563 /*
2564 * memory, register.
2565 */
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2576 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 /* The register, register encoding is invalid. */
2582 else
2583 return IEMOP_RAISE_INVALID_OPCODE();
2584 return VINF_SUCCESS;
2585}
2586/* Opcode 0xf3 0x0f 0x2b - invalid */
2587/* Opcode 0xf2 0x0f 0x2b - invalid */
2588
2589
2590/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2591FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2592/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2593FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2594/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2595FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2596/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2597FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2598
2599/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2600FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2601/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2602FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2603/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2604FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2605/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2606FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2607
2608/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2609FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2610/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2611FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2612/* Opcode 0xf3 0x0f 0x2e - invalid */
2613/* Opcode 0xf2 0x0f 0x2e - invalid */
2614
2615/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2616FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2617/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2618FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2619/* Opcode 0xf3 0x0f 0x2f - invalid */
2620/* Opcode 0xf2 0x0f 0x2f - invalid */
2621
2622/** Opcode 0x0f 0x30. */
2623FNIEMOP_DEF(iemOp_wrmsr)
2624{
2625 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2628}
2629
2630
2631/** Opcode 0x0f 0x31. */
2632FNIEMOP_DEF(iemOp_rdtsc)
2633{
2634 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2637}
2638
2639
2640/** Opcode 0x0f 0x32. */
2641FNIEMOP_DEF(iemOp_rdmsr)
2642{
2643 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2646}
2647
2648
2649/** Opcode 0x0f 0x33. */
2650FNIEMOP_DEF(iemOp_rdpmc)
2651{
2652 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2655}
2656
2657
2658/** Opcode 0x0f 0x34. */
2659FNIEMOP_STUB(iemOp_sysenter);
2660/** Opcode 0x0f 0x35. */
2661FNIEMOP_STUB(iemOp_sysexit);
2662/** Opcode 0x0f 0x37. */
2663FNIEMOP_STUB(iemOp_getsec);
2664
2665
2666/** Opcode 0x0f 0x38. */
2667FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2668{
2669#ifdef IEM_WITH_THREE_0F_38
2670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
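 /* Editor's note: each three-byte opcode has four consecutive table
    entries, one per mandatory prefix (none, 0x66, 0xf3, 0xf2), selected
    via idxPrefix -- hence the '* 4' in the index calculation. */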
2671 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2672#else
2673 IEMOP_BITCH_ABOUT_STUB();
2674 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2675#endif
2676}
2677
2678
2679/** Opcode 0x0f 0x3a. */
2680FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2681{
2682#ifdef IEM_WITH_THREE_0F_3A
2683 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2684 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2685#else
2686 IEMOP_BITCH_ABOUT_STUB();
2687 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2688#endif
2689}
2690
2691
2692/**
2693 * Implements a conditional move.
2694 *
2695 * Wish there was an obvious way to do this where we could share and reduce
2696 * code bloat.
2697 *
2698 * @param a_Cnd The conditional "microcode" operation.
2699 */
2700#define CMOV_X(a_Cnd) \
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2703 { \
2704 switch (pVCpu->iem.s.enmEffOpSize) \
2705 { \
2706 case IEMMODE_16BIT: \
2707 IEM_MC_BEGIN(0, 1); \
2708 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2709 a_Cnd { \
2710 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2711 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 case IEMMODE_32BIT: \
2718 IEM_MC_BEGIN(0, 1); \
2719 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2720 a_Cnd { \
2721 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2723 } IEM_MC_ELSE() { \
2724 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_64BIT: \
2731 IEM_MC_BEGIN(0, 1); \
2732 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2733 a_Cnd { \
2734 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2735 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2736 } IEM_MC_ENDIF(); \
2737 IEM_MC_ADVANCE_RIP(); \
2738 IEM_MC_END(); \
2739 return VINF_SUCCESS; \
2740 \
2741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2742 } \
2743 } \
2744 else \
2745 { \
2746 switch (pVCpu->iem.s.enmEffOpSize) \
2747 { \
2748 case IEMMODE_16BIT: \
2749 IEM_MC_BEGIN(0, 2); \
2750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2751 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2753 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2754 a_Cnd { \
2755 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2756 } IEM_MC_ENDIF(); \
2757 IEM_MC_ADVANCE_RIP(); \
2758 IEM_MC_END(); \
2759 return VINF_SUCCESS; \
2760 \
2761 case IEMMODE_32BIT: \
2762 IEM_MC_BEGIN(0, 2); \
2763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2764 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2766 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2767 a_Cnd { \
2768 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2769 } IEM_MC_ELSE() { \
2770 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2771 } IEM_MC_ENDIF(); \
2772 IEM_MC_ADVANCE_RIP(); \
2773 IEM_MC_END(); \
2774 return VINF_SUCCESS; \
2775 \
2776 case IEMMODE_64BIT: \
2777 IEM_MC_BEGIN(0, 2); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2781 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2782 a_Cnd { \
2783 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2784 } IEM_MC_ENDIF(); \
2785 IEM_MC_ADVANCE_RIP(); \
2786 IEM_MC_END(); \
2787 return VINF_SUCCESS; \
2788 \
2789 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2790 } \
2791 } do {} while (0)
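/* Editor's note: the IEM_MC_ELSE() branches in the 32-bit cases above are not
   an accident. In 64-bit mode a 32-bit CMOVcc always zero-extends the
   destination register, even when the condition is false, so the high half
   must be cleared on both paths. */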
2792
2793
2794
2795/** Opcode 0x0f 0x40. */
2796FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x41. */
2804FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x42. */
2812FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2816}
2817
2818
2819/** Opcode 0x0f 0x43. */
2820FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2824}
2825
2826
2827/** Opcode 0x0f 0x44. */
2828FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2829{
2830 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2831 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2832}
2833
2834
2835/** Opcode 0x0f 0x45. */
2836FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2837{
2838 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2839 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2840}
2841
2842
2843/** Opcode 0x0f 0x46. */
2844FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2845{
2846 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2847 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2848}
2849
2850
2851/** Opcode 0x0f 0x47. */
2852FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2853{
2854 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2855 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2856}
2857
2858
2859/** Opcode 0x0f 0x48. */
2860FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2861{
2862 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2863 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2864}
2865
2866
2867/** Opcode 0x0f 0x49. */
2868FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2869{
2870 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2871 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2872}
2873
2874
2875/** Opcode 0x0f 0x4a. */
2876FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2877{
2878 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2879 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2880}
2881
2882
2883/** Opcode 0x0f 0x4b. */
2884FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2885{
2886 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2887 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2888}
2889
2890
2891/** Opcode 0x0f 0x4c. */
2892FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2893{
2894 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2895 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2896}
2897
2898
2899/** Opcode 0x0f 0x4d. */
2900FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2901{
2902 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2903 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2904}
2905
2906
2907/** Opcode 0x0f 0x4e. */
2908FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2909{
2910 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2911 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2912}
2913
2914
2915/** Opcode 0x0f 0x4f. */
2916FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2917{
2918 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2919 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2920}
2921
2922#undef CMOV_X
2923
2924/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2925FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2926/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2927FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2928/* Opcode 0xf3 0x0f 0x50 - invalid */
2929/* Opcode 0xf2 0x0f 0x50 - invalid */
2930
2931/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2932FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2933/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2934FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2935/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2936FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2937/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2938FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2939
2940/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2941FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2942/* Opcode 0x66 0x0f 0x52 - invalid */
2943/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2944FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2945/* Opcode 0xf2 0x0f 0x52 - invalid */
2946
2947/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2948FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2949/* Opcode 0x66 0x0f 0x53 - invalid */
2950/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2951FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2952/* Opcode 0xf2 0x0f 0x53 - invalid */
2953
2954/** Opcode 0x0f 0x54 - andps Vps, Wps */
2955FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2958/* Opcode 0xf3 0x0f 0x54 - invalid */
2959/* Opcode 0xf2 0x0f 0x54 - invalid */
2960
2961/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2962FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2963/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2964FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2965/* Opcode 0xf3 0x0f 0x55 - invalid */
2966/* Opcode 0xf2 0x0f 0x55 - invalid */
2967
2968/** Opcode 0x0f 0x56 - orps Vps, Wps */
2969FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2970/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2971FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2972/* Opcode 0xf3 0x0f 0x56 - invalid */
2973/* Opcode 0xf2 0x0f 0x56 - invalid */
2974
2975/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2976FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2977/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2978FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2979/* Opcode 0xf3 0x0f 0x57 - invalid */
2980/* Opcode 0xf2 0x0f 0x57 - invalid */
2981
2982/** Opcode 0x0f 0x58 - addps Vps, Wps */
2983FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2984/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2985FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2986/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2987FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2988/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2989FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2990
2991/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2992FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2993/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2994FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2995/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2996FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2997/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2998FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2999
3000/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3001FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3002/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3003FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3004/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3005FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3006/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3007FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3008
3009/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3010FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3011/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3012FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3013/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3014FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3015/* Opcode 0xf2 0x0f 0x5b - invalid */
3016
3017/** Opcode 0x0f 0x5c - subps Vps, Wps */
3018FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3019/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3020FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3021/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3022FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3023/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3024FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3025
3026/** Opcode 0x0f 0x5d - minps Vps, Wps */
3027FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3028/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3029FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3030/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3031FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3032/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3033FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3034
3035/** Opcode 0x0f 0x5e - divps Vps, Wps */
3036FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3037/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3038FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3039/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3040FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3041/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3042FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3043
3044/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3045FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3046/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3047FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3048/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3049FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3050/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3051FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3052
3053/**
3054 * Common worker for SSE2 instructions on the forms:
3055 *     pxxxx xmm1, xmm2/mem128
3056 *
3057 * The 2nd operand is the low half of a register, which in the memory case
3058 * means a 128-bit aligned access where 64 or all 128 bits may be read,
3059 * though only the low 64 bits are used.
3060 *
3061 * Exceptions type 4.
3062 */
3063FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3064{
3065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3067 {
3068 /*
3069 * Register, register.
3070 */
3071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3072 IEM_MC_BEGIN(2, 0);
3073 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3074 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3075 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3076 IEM_MC_PREPARE_SSE_USAGE();
3077 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3078 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3079 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3080 IEM_MC_ADVANCE_RIP();
3081 IEM_MC_END();
3082 }
3083 else
3084 {
3085 /*
3086 * Register, memory.
3087 */
3088 IEM_MC_BEGIN(2, 2);
3089 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3090 IEM_MC_LOCAL(uint64_t, uSrc);
3091 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3093
3094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3096 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3097 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3098
3099 IEM_MC_PREPARE_SSE_USAGE();
3100 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3101 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3102
3103 IEM_MC_ADVANCE_RIP();
3104 IEM_MC_END();
3105 }
3106 return VINF_SUCCESS;
3107}
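/* Editor's sketch of the low-low interleave implemented above, using
   punpcklbw xmm1, xmm2 as the example: only the low qwords are read, and
   with the low-qword bytes (high to low)
        xmm1 = B7 B6 B5 B4 B3 B2 B1 B0,  xmm2 = A7 A6 A5 A4 A3 A2 A1 A0
   the full 128-bit result is the byte interleave
        xmm1 = A7 B7 A6 B6 A5 B5 A4 B4 A3 B3 A2 B2 A1 B1 A0 B0. */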
3108
3109
3110/**
3111 * Common worker for MMX instructions on the forms:
3112 *     pxxxx mm1, mm2/mem32
3113 *
3114 * The 2nd operand is the low half of a register, which in the memory case
3115 * means a 32-bit memory access.
3116 *
3117 * Exceptions type 4.
3118 */
3119FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3121{
3122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3123 if (!pImpl->pfnU64)
3124 return IEMOP_RAISE_INVALID_OPCODE();
3125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3131 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133 IEM_MC_BEGIN(2, 0);
3134 IEM_MC_ARG(uint64_t *, pDst, 0);
3135 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3136 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3137 IEM_MC_PREPARE_FPU_USAGE();
3138 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3139 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3140 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3141 IEM_MC_ADVANCE_RIP();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /*
3147 * Register, memory.
3148 */
3149 IEM_MC_BEGIN(2, 2);
3150 IEM_MC_ARG(uint64_t *, pDst, 0);
3151 IEM_MC_LOCAL(uint32_t, uSrc);
3152 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3158 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3159
3160 IEM_MC_PREPARE_FPU_USAGE();
3161 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3162 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3163
3164 IEM_MC_ADVANCE_RIP();
3165 IEM_MC_END();
3166 }
3167 return VINF_SUCCESS;
3168}
3169
3170
3171/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3172FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3173{
3174 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3175 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3176}
3177
3178/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3179FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3180{
3181 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3182 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3183}
3184
3185/* Opcode 0xf3 0x0f 0x60 - invalid */
3186
3187
3188/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3189FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3190{
3191 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3193}
3194
3195/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3196FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3197{
3198 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3199 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x61 - invalid */
3203
3204
3205/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3206FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3207{
3208 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3210}
3211
3212/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3213FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3214{
3215 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x62 - invalid */
3220
3221
3222
3223/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3224FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3225/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3226FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3227/* Opcode 0xf3 0x0f 0x63 - invalid */
3228
3229/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3230FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3231/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3232FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3233/* Opcode 0xf3 0x0f 0x64 - invalid */
3234
3235/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3236FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3237/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3238FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3239/* Opcode 0xf3 0x0f 0x65 - invalid */
3240
3241/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3242FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3243/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3244FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3245/* Opcode 0xf3 0x0f 0x66 - invalid */
3246
3247/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3248FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3249/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3250FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3251/* Opcode 0xf3 0x0f 0x67 - invalid */
3252
3253
3254/**
3255 * Common worker for MMX instructions on the form:
3256 * pxxxx mm1, mm2/mem64
3257 *
3258 * The 2nd operand is the second half of a register, which in the memory case
3259 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3260 * where it may read the full 128 bits or only the upper 64 bits.
3261 *
3262 * Exceptions type 4.
3263 */
3264FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3265{
3266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3267 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3269 {
3270 /*
3271 * Register, register.
3272 */
3273 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3274 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_BEGIN(2, 0);
3277 IEM_MC_ARG(uint64_t *, pDst, 0);
3278 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3279 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3280 IEM_MC_PREPARE_FPU_USAGE();
3281 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3282 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3283 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3284 IEM_MC_ADVANCE_RIP();
3285 IEM_MC_END();
3286 }
3287 else
3288 {
3289 /*
3290 * Register, memory.
3291 */
3292 IEM_MC_BEGIN(2, 2);
3293 IEM_MC_ARG(uint64_t *, pDst, 0);
3294 IEM_MC_LOCAL(uint64_t, uSrc);
3295 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3301 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3302
3303 IEM_MC_PREPARE_FPU_USAGE();
3304 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3305 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
3312
3313
3314/**
3315 * Common worker for SSE2 instructions on the form:
3316 * pxxxx xmm1, xmm2/mem128
3317 *
3318 * The 2nd operand is the second half of a register, which in the memory case
3319 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3320 * where it may read the full 128 bits or only the upper 64 bits.
3321 *
3322 * Exceptions type 4.
3323 */
3324FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3325{
3326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3327 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3328 {
3329 /*
3330 * Register, register.
3331 */
3332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3333 IEM_MC_BEGIN(2, 0);
3334 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3335 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3336 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3337 IEM_MC_PREPARE_SSE_USAGE();
3338 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3339 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3340 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3341 IEM_MC_ADVANCE_RIP();
3342 IEM_MC_END();
3343 }
3344 else
3345 {
3346 /*
3347 * Register, memory.
3348 */
3349 IEM_MC_BEGIN(2, 2);
3350 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3351 IEM_MC_LOCAL(RTUINT128U, uSrc);
3352 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3354
3355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3358 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3359
3360 IEM_MC_PREPARE_SSE_USAGE();
3361 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3362 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3363
3364 IEM_MC_ADVANCE_RIP();
3365 IEM_MC_END();
3366 }
3367 return VINF_SUCCESS;
3368}
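/* Editor's sketch of the high-high interleave implemented above, using
   punpckhbw xmm1, xmm2 as the example: only the high qwords contribute, and
   with high-qword bytes B15..B8 (xmm1) and A15..A8 (xmm2) the result is
        xmm1 = A15 B15 A14 B14 A13 B13 A12 B12 A11 B11 A10 B10 A9 B9 A8 B8. */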
3369
3370
3371/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3372FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3373{
3374 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3375 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3376}
3377
3378/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3379FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3380{
3381 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3382 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3383}
3384/* Opcode 0xf3 0x0f 0x68 - invalid */
3385
3386
3387/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3388FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3389{
3390 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3391 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3392}
3393
3394/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3395FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3396{
3397 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3398 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3400}
3401/* Opcode 0xf3 0x0f 0x69 - invalid */
3402
3403
3404/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3405FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3406{
3407 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3409}
3410
3411/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3412FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3413{
3414 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
3415 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3416}
3417/* Opcode 0xf3 0x0f 0x6a - invalid */
3418
3419
3420/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3421FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3422/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3423FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3424/* Opcode 0xf3 0x0f 0x6b - invalid */
3425
3426
3427/* Opcode 0x0f 0x6c - invalid */
3428
3429/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3430FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3431{
3432 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3433 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3434}
3435
3436/* Opcode 0xf3 0x0f 0x6c - invalid */
3437/* Opcode 0xf2 0x0f 0x6c - invalid */
3438
3439
3440/* Opcode 0x0f 0x6d - invalid */
3441
3442/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3443FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3444{
3445 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, Wx");
3446 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3447}
3448
3449/* Opcode 0xf3 0x0f 0x6d - invalid */
3450
3451
3452FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3456 {
3457 /**
3458 * @opcode 0x6e
3459 * @opcodesub rex.w=1
3460 * @oppfx none
3461 * @opcpuid mmx
3462 * @opgroup og_mmx_datamove
3463 * @opxcpttype 5
3464 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3465 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3466 */
3467 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /* MMX, greg64 */
3471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3472 IEM_MC_BEGIN(0, 1);
3473 IEM_MC_LOCAL(uint64_t, u64Tmp);
3474
3475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3477
3478 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3479 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3480 IEM_MC_FPU_TO_MMX_MODE();
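 /* (Editor's note: executing an MMX instruction puts the x87 unit into
    MMX mode -- top-of-stack cleared and all tags marked valid -- which is
    the ftw=0xff state the @optest lines above expect.) */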
3481
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /* MMX, [mem64] */
3488 IEM_MC_BEGIN(0, 2);
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3490 IEM_MC_LOCAL(uint64_t, u64Tmp);
3491
3492 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3495 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3496
3497 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3498 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3499 IEM_MC_FPU_TO_MMX_MODE();
3500
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 }
3505 else
3506 {
3507 /**
3508 * @opdone
3509 * @opcode 0x6e
3510 * @opcodesub rex.w=0
3511 * @oppfx none
3512 * @opcpuid mmx
3513 * @opgroup og_mmx_datamove
3514 * @opxcpttype 5
3515 * @opfunction iemOp_movd_q_Pd_Ey
3516 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3517 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3518 */
3519 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3521 {
3522 /* MMX, greg */
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524 IEM_MC_BEGIN(0, 1);
3525 IEM_MC_LOCAL(uint64_t, u64Tmp);
3526
3527 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3528 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3529
3530 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3531 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3532 IEM_MC_FPU_TO_MMX_MODE();
3533
3534 IEM_MC_ADVANCE_RIP();
3535 IEM_MC_END();
3536 }
3537 else
3538 {
3539 /* MMX, [mem] */
3540 IEM_MC_BEGIN(0, 2);
3541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3542 IEM_MC_LOCAL(uint32_t, u32Tmp);
3543
3544 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3547 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3548
3549 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3550 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3551 IEM_MC_FPU_TO_MMX_MODE();
3552
3553 IEM_MC_ADVANCE_RIP();
3554 IEM_MC_END();
3555 }
3556 }
3557 return VINF_SUCCESS;
3558}
3559
3560/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3561FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3562{
3563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3564 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3565 IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
3566 else
3567 IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
3568 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3569 {
3570 /* XMM, greg*/
3571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3572 IEM_MC_BEGIN(0, 1);
3573 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3574 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3575 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3576 {
3577 IEM_MC_LOCAL(uint64_t, u64Tmp);
3578 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3579 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3580 }
3581 else
3582 {
3583 IEM_MC_LOCAL(uint32_t, u32Tmp);
3584 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3585 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3586 }
3587 IEM_MC_ADVANCE_RIP();
3588 IEM_MC_END();
3589 }
3590 else
3591 {
3592 /* XMM, [mem] */
3593 IEM_MC_BEGIN(0, 2);
3594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3595 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3598 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3599 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3600 {
3601 IEM_MC_LOCAL(uint64_t, u64Tmp);
3602 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3603 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3604 }
3605 else
3606 {
3607 IEM_MC_LOCAL(uint32_t, u32Tmp);
3608 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3609 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3610 }
3611 IEM_MC_ADVANCE_RIP();
3612 IEM_MC_END();
3613 }
3614 return VINF_SUCCESS;
3615}
3616
3617/* Opcode 0xf3 0x0f 0x6e - invalid */
3618
3619
3620/** Opcode 0x0f 0x6f - movq Pq, Qq */
3621FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3622{
3623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3624 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3625 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3626 {
3627 /*
3628 * Register, register.
3629 */
3630 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3631 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3633 IEM_MC_BEGIN(0, 1);
3634 IEM_MC_LOCAL(uint64_t, u64Tmp);
3635 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3636 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3637 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3638 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3639 IEM_MC_ADVANCE_RIP();
3640 IEM_MC_END();
3641 }
3642 else
3643 {
3644 /*
3645 * Register, memory.
3646 */
3647 IEM_MC_BEGIN(0, 2);
3648 IEM_MC_LOCAL(uint64_t, u64Tmp);
3649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3650
3651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3653 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3654 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3655 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3656 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3657
3658 IEM_MC_ADVANCE_RIP();
3659 IEM_MC_END();
3660 }
3661 return VINF_SUCCESS;
3662}
3663
3664/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3665FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3666{
3667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3668 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3669 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3670 {
3671 /*
3672 * Register, register.
3673 */
3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3675 IEM_MC_BEGIN(0, 0);
3676 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3678 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3679 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3680 IEM_MC_ADVANCE_RIP();
3681 IEM_MC_END();
3682 }
3683 else
3684 {
3685 /*
3686 * Register, memory.
3687 */
3688 IEM_MC_BEGIN(0, 2);
3689 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3691
3692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3694 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3695 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3696 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3697 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3698
3699 IEM_MC_ADVANCE_RIP();
3700 IEM_MC_END();
3701 }
3702 return VINF_SUCCESS;
3703}
3704
3705/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3706FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3707{
3708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3709 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3710 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3711 {
3712 /*
3713 * Register, register.
3714 */
3715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3716 IEM_MC_BEGIN(0, 0);
3717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3719 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3720 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3721 IEM_MC_ADVANCE_RIP();
3722 IEM_MC_END();
3723 }
3724 else
3725 {
3726 /*
3727 * Register, memory.
3728 */
3729 IEM_MC_BEGIN(0, 2);
3730 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3732
3733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3735 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3736 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3737 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3738 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3739
3740 IEM_MC_ADVANCE_RIP();
3741 IEM_MC_END();
3742 }
3743 return VINF_SUCCESS;
3744}
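/* Note: the movdqa and movdqu workers above differ only in the memory
   fetch. movdqa uses IEM_MC_FETCH_MEM_U128_ALIGN_SSE, which faults on a
   misaligned 16-byte operand, while movdqu uses the plain
   IEM_MC_FETCH_MEM_U128 and accepts any alignment; the register,register
   forms are identical. */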
3745
3746
3747/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3748FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3749{
3750 IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3753 {
3754 /*
3755 * Register, register.
3756 */
3757 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3759
3760 IEM_MC_BEGIN(3, 0);
3761 IEM_MC_ARG(uint64_t *, pDst, 0);
3762 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3763 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3764 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3765 IEM_MC_PREPARE_FPU_USAGE();
3766 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3767 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3768 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3769 IEM_MC_ADVANCE_RIP();
3770 IEM_MC_END();
3771 }
3772 else
3773 {
3774 /*
3775 * Register, memory.
3776 */
3777 IEM_MC_BEGIN(3, 2);
3778 IEM_MC_ARG(uint64_t *, pDst, 0);
3779 IEM_MC_LOCAL(uint64_t, uSrc);
3780 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3782
3783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3784 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3785 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3787 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3788
3789 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3790 IEM_MC_PREPARE_FPU_USAGE();
3791 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3792 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3793
3794 IEM_MC_ADVANCE_RIP();
3795 IEM_MC_END();
3796 }
3797 return VINF_SUCCESS;
3798}
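/* Note: sketch of the shuffle iemAImpl_pshufw performs, assuming the
   documented pshufw semantics; each 2-bit field of the immediate selects
   one source word:
       for (i = 0; i < 4; i++)
           au16Dst[i] = au16Src[(bEvil >> (i * 2)) & 3];
   e.g. an immediate of 0x1B (fields 3,2,1,0) reverses the four words. */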
3799
3800/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3801FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3802{
3803 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3806 {
3807 /*
3808 * Register, register.
3809 */
3810 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(3, 0);
3814 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3815 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3816 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3817 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3818 IEM_MC_PREPARE_SSE_USAGE();
3819 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3820 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3821 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3822 IEM_MC_ADVANCE_RIP();
3823 IEM_MC_END();
3824 }
3825 else
3826 {
3827 /*
3828 * Register, memory.
3829 */
3830 IEM_MC_BEGIN(3, 2);
3831 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3832 IEM_MC_LOCAL(RTUINT128U, uSrc);
3833 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3835
3836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3837 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3838 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3841
3842 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3843 IEM_MC_PREPARE_SSE_USAGE();
3844 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3845 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3846
3847 IEM_MC_ADVANCE_RIP();
3848 IEM_MC_END();
3849 }
3850 return VINF_SUCCESS;
3851}
3852
3853/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3854FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3855{
3856 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3858 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3859 {
3860 /*
3861 * Register, register.
3862 */
3863 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3865
3866 IEM_MC_BEGIN(3, 0);
3867 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3868 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3869 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3870 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3871 IEM_MC_PREPARE_SSE_USAGE();
3872 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3873 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3874 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3875 IEM_MC_ADVANCE_RIP();
3876 IEM_MC_END();
3877 }
3878 else
3879 {
3880 /*
3881 * Register, memory.
3882 */
3883 IEM_MC_BEGIN(3, 2);
3884 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3885 IEM_MC_LOCAL(RTUINT128U, uSrc);
3886 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3888
3889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3890 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3891 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3894
3895 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3896 IEM_MC_PREPARE_SSE_USAGE();
3897 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3898 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3899
3900 IEM_MC_ADVANCE_RIP();
3901 IEM_MC_END();
3902 }
3903 return VINF_SUCCESS;
3904}
3905
3906/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3907FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3908{
3909 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3911 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3912 {
3913 /*
3914 * Register, register.
3915 */
3916 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3918
3919 IEM_MC_BEGIN(3, 0);
3920 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3921 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3922 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3923 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3924 IEM_MC_PREPARE_SSE_USAGE();
3925 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3926 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3927 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3928 IEM_MC_ADVANCE_RIP();
3929 IEM_MC_END();
3930 }
3931 else
3932 {
3933 /*
3934 * Register, memory.
3935 */
3936 IEM_MC_BEGIN(3, 2);
3937 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3938 IEM_MC_LOCAL(RTUINT128U, uSrc);
3939 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3941
3942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3943 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3944 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3946 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3947
3948 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3949 IEM_MC_PREPARE_SSE_USAGE();
3950 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3951 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3952
3953 IEM_MC_ADVANCE_RIP();
3954 IEM_MC_END();
3955 }
3956 return VINF_SUCCESS;
3957}
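/* Note: pshufd shuffles all four dwords; pshufhw shuffles only the high
   four words and copies the low quadword unchanged; pshuflw does the
   opposite. All three use the same 2-bit-per-element immediate format
   sketched for pshufw above. */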
3958
3959
3960/** Opcode 0x0f 0x71 11/2. */
3961FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3962
3963/** Opcode 0x66 0x0f 0x71 11/2. */
3964FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3965
3966/** Opcode 0x0f 0x71 11/4. */
3967FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3968
3969/** Opcode 0x66 0x0f 0x71 11/4. */
3970FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3971
3972/** Opcode 0x0f 0x71 11/6. */
3973FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3974
3975/** Opcode 0x66 0x0f 0x71 11/6. */
3976FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3977
3978
3979/**
3980 * Group 12 jump table for register variant.
3981 */
3982IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3983{
3984 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3985 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3986 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3987 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3988 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3989 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3990 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3991 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3992};
3993AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
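/* Note: the second table index is pVCpu->iem.s.idxPrefix, assumed here to
   follow the usual IEM convention of 0 = no prefix, 1 = 0x66, 2 = 0xF3,
   3 = 0xF2. E.g. 'psrlw mm, Ib' (no prefix, /2) dispatches to entry
   2*4 + 0 and 'psrlw xmm, Ib' (0x66, /2) to entry 2*4 + 1. */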
3994
3995
3996/** Opcode 0x0f 0x71. */
3997FNIEMOP_DEF(iemOp_Grp12)
3998{
3999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4000 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4001 /* register, register */
4002 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4003 + pVCpu->iem.s.idxPrefix], bRm);
4004 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4005}
4006
4007
4008/** Opcode 0x0f 0x72 11/2. */
4009FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4010
4011/** Opcode 0x66 0x0f 0x72 11/2. */
4012FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4013
4014/** Opcode 0x0f 0x72 11/4. */
4015FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4016
4017/** Opcode 0x66 0x0f 0x72 11/4. */
4018FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4019
4020/** Opcode 0x0f 0x72 11/6. */
4021FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4022
4023/** Opcode 0x66 0x0f 0x72 11/6. */
4024FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4025
4026
4027/**
4028 * Group 13 jump table for register variant.
4029 */
4030IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4031{
4032 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4033 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4034 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4035 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4036 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4037 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4038 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4039 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4040};
4041AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4042
4043/** Opcode 0x0f 0x72. */
4044FNIEMOP_DEF(iemOp_Grp13)
4045{
4046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4047 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4048 /* register, register */
4049 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4050 + pVCpu->iem.s.idxPrefix], bRm);
4051 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4052}
4053
4054
4055/** Opcode 0x0f 0x73 11/2. */
4056FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4057
4058/** Opcode 0x66 0x0f 0x73 11/2. */
4059FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4060
4061/** Opcode 0x66 0x0f 0x73 11/3. */
4062FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4063
4064/** Opcode 0x0f 0x73 11/6. */
4065FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4066
4067/** Opcode 0x66 0x0f 0x73 11/6. */
4068FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4069
4070/** Opcode 0x66 0x0f 0x73 11/7. */
4071FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4072
4073/**
4074 * Group 14 jump table for register variant.
4075 */
4076IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4077{
4078 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4079 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4080 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4081 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4082 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4083 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4084 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4085 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4086};
4087AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
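/* Note: unlike groups 12 and 13, the /3 and /7 rows have valid entries
   only in the 0x66 column, matching the fact that psrldq and pslldq exist
   solely as 128-bit SSE2 instructions without MMX counterparts. */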
4088
4089
4090/** Opcode 0x0f 0x73. */
4091FNIEMOP_DEF(iemOp_Grp14)
4092{
4093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4095 /* register, register */
4096 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4097 + pVCpu->iem.s.idxPrefix], bRm);
4098 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4099}
4100
4101
4102/**
4103 * Common worker for MMX instructions of the form:
4104 * pxxx mm1, mm2/mem64
4105 */
4106FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4107{
4108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4109 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4110 {
4111 /*
4112 * Register, register.
4113 */
4114 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4115 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4117 IEM_MC_BEGIN(2, 0);
4118 IEM_MC_ARG(uint64_t *, pDst, 0);
4119 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4120 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4121 IEM_MC_PREPARE_FPU_USAGE();
4122 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4123 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4124 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4125 IEM_MC_ADVANCE_RIP();
4126 IEM_MC_END();
4127 }
4128 else
4129 {
4130 /*
4131 * Register, memory.
4132 */
4133 IEM_MC_BEGIN(2, 2);
4134 IEM_MC_ARG(uint64_t *, pDst, 0);
4135 IEM_MC_LOCAL(uint64_t, uSrc);
4136 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4138
4139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4141 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4142 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4143
4144 IEM_MC_PREPARE_FPU_USAGE();
4145 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4146 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4147
4148 IEM_MC_ADVANCE_RIP();
4149 IEM_MC_END();
4150 }
4151 return VINF_SUCCESS;
4152}
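/* Note: the pcmpeqb/pcmpeqw/pcmpeqd decoders further down forward to this
   worker with the matching g_iemAImpl_* implementation table. The 64-bit
   MMX memory operand is fetched with the plain IEM_MC_FETCH_MEM_U64, i.e.
   without the alignment restriction the 128-bit SSE2 worker below imposes. */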
4153
4154
4155/**
4156 * Common worker for SSE2 instructions of the form:
4157 * pxxx xmm1, xmm2/mem128
4158 *
4159 * Proper alignment of the 128-bit operand is enforced.
4160 * Exceptions type 4. SSE2 cpuid checks.
4161 */
4162FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4163{
4164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4166 {
4167 /*
4168 * Register, register.
4169 */
4170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4171 IEM_MC_BEGIN(2, 0);
4172 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4173 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4175 IEM_MC_PREPARE_SSE_USAGE();
4176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4177 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4179 IEM_MC_ADVANCE_RIP();
4180 IEM_MC_END();
4181 }
4182 else
4183 {
4184 /*
4185 * Register, memory.
4186 */
4187 IEM_MC_BEGIN(2, 2);
4188 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4189 IEM_MC_LOCAL(RTUINT128U, uSrc);
4190 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4192
4193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4196 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4197
4198 IEM_MC_PREPARE_SSE_USAGE();
4199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4201
4202 IEM_MC_ADVANCE_RIP();
4203 IEM_MC_END();
4204 }
4205 return VINF_SUCCESS;
4206}
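/* Note: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the 16-byte alignment
   required of legacy SSE memory operands (the "exceptions type 4" class
   mentioned above), so a misaligned access faults before the pImpl worker
   is ever invoked. */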
4207
4208
4209/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4210FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4211{
4212 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4213 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4214}
4215
4216/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4217FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4218{
4219 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4220 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4221}
4222
4223/* Opcode 0xf3 0x0f 0x74 - invalid */
4224/* Opcode 0xf2 0x0f 0x74 - invalid */
4225
4226
4227/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4228FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4229{
4230 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4231 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4232}
4233
4234/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4235FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4236{
4237 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4238 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4239}
4240
4241/* Opcode 0xf3 0x0f 0x75 - invalid */
4242/* Opcode 0xf2 0x0f 0x75 - invalid */
4243
4244
4245/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4246FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4247{
4248 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4249 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4250}
4251
4252/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4253FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4254{
4255 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4256 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4257}
4258
4259/* Opcode 0xf3 0x0f 0x76 - invalid */
4260/* Opcode 0xf2 0x0f 0x76 - invalid */
4261
4262
4263/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4264FNIEMOP_STUB(iemOp_emms);
4265/* Opcode 0x66 0x0f 0x77 - invalid */
4266/* Opcode 0xf3 0x0f 0x77 - invalid */
4267/* Opcode 0xf2 0x0f 0x77 - invalid */
4268
4269/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4270FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4271/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4272FNIEMOP_STUB(iemOp_AmdGrp17);
4273/* Opcode 0xf3 0x0f 0x78 - invalid */
4274/* Opcode 0xf2 0x0f 0x78 - invalid */
4275
4276/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4277FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4278/* Opcode 0x66 0x0f 0x79 - invalid */
4279/* Opcode 0xf3 0x0f 0x79 - invalid */
4280/* Opcode 0xf2 0x0f 0x79 - invalid */
4281
4282/* Opcode 0x0f 0x7a - invalid */
4283/* Opcode 0x66 0x0f 0x7a - invalid */
4284/* Opcode 0xf3 0x0f 0x7a - invalid */
4285/* Opcode 0xf2 0x0f 0x7a - invalid */
4286
4287/* Opcode 0x0f 0x7b - invalid */
4288/* Opcode 0x66 0x0f 0x7b - invalid */
4289/* Opcode 0xf3 0x0f 0x7b - invalid */
4290/* Opcode 0xf2 0x0f 0x7b - invalid */
4291
4292/* Opcode 0x0f 0x7c - invalid */
4293/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4294FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4295/* Opcode 0xf3 0x0f 0x7c - invalid */
4296/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4297FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4298
4299/* Opcode 0x0f 0x7d - invalid */
4300/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4301FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4302/* Opcode 0xf3 0x0f 0x7d - invalid */
4303/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4304FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4305
4306
4307/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4308FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4309{
4310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4311 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4312 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4313 else
4314 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4316 {
4317 /* greg, MMX */
4318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4319 IEM_MC_BEGIN(0, 1);
4320 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4321 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4322 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4323 {
4324 IEM_MC_LOCAL(uint64_t, u64Tmp);
4325 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4326 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4327 }
4328 else
4329 {
4330 IEM_MC_LOCAL(uint32_t, u32Tmp);
4331 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4332 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4333 }
4334 IEM_MC_ADVANCE_RIP();
4335 IEM_MC_END();
4336 }
4337 else
4338 {
4339 /* [mem], MMX */
4340 IEM_MC_BEGIN(0, 2);
4341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4342 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4345 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4346 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4347 {
4348 IEM_MC_LOCAL(uint64_t, u64Tmp);
4349 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4350 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4351 }
4352 else
4353 {
4354 IEM_MC_LOCAL(uint32_t, u32Tmp);
4355 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4356 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4357 }
4358 IEM_MC_ADVANCE_RIP();
4359 IEM_MC_END();
4360 }
4361 return VINF_SUCCESS;
4362}
4363
4364/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4365FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4366{
4367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4368 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4369 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4370 else
4371 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4373 {
4374 /* greg, XMM */
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4376 IEM_MC_BEGIN(0, 1);
4377 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4378 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4379 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4380 {
4381 IEM_MC_LOCAL(uint64_t, u64Tmp);
4382 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4383 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4384 }
4385 else
4386 {
4387 IEM_MC_LOCAL(uint32_t, u32Tmp);
4388 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4389 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4390 }
4391 IEM_MC_ADVANCE_RIP();
4392 IEM_MC_END();
4393 }
4394 else
4395 {
4396 /* [mem], XMM */
4397 IEM_MC_BEGIN(0, 2);
4398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4399 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4402 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4403 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4404 {
4405 IEM_MC_LOCAL(uint64_t, u64Tmp);
4406 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4407 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4408 }
4409 else
4410 {
4411 IEM_MC_LOCAL(uint32_t, u32Tmp);
4412 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4413 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4414 }
4415 IEM_MC_ADVANCE_RIP();
4416 IEM_MC_END();
4417 }
4418 return VINF_SUCCESS;
4419}
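/* Note: in the memory forms of the two store variants above (and in the
   movq/movdqa/movdqu store forms below) the calculated effective address
   is the destination of the store; the GCPtrEffSrc naming is simply
   inherited from the common decode pattern. */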
4420
4421
4422/**
4423 * @opcode 0x7e
4424 * @opcodesub !11 mr/reg
4425 * @oppfx 0xf3
4426 * @opcpuid sse2
4427 * @opgroup og_sse2_pcksclr_datamove
4428 * @opxcpttype 5
4429 * @optest op1=1 op2=2 -> op1=2
4430 * @optest op1=0 op2=-42 -> op1=-42
4431 */
4432FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4433{
4434 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4437 {
4438 /*
4439 * Register, register.
4440 */
4441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4442 IEM_MC_BEGIN(0, 2);
4443 IEM_MC_LOCAL(uint64_t, uSrc);
4444
4445 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4446 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4447
4448 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4449 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4450
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 }
4454 else
4455 {
4456 /*
4457 * Memory, register.
4458 */
4459 IEM_MC_BEGIN(0, 2);
4460 IEM_MC_LOCAL(uint64_t, uSrc);
4461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4462
4463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4465 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4466 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4467
4468 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4469 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4470
4471 IEM_MC_ADVANCE_RIP();
4472 IEM_MC_END();
4473 }
4474 return VINF_SUCCESS;
4475}
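/* Note: this is the F3-prefixed load form of movq (xmm, xmm/m64); it
   always zero-extends the destination to 128 bits via the *_ZX_U128
   stores above. The companion store form, movq Wq,Vq, is encoded as
   0x66 0x0f 0xd6. */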
4476
4477/* Opcode 0xf2 0x0f 0x7e - invalid */
4478
4479
4480/** Opcode 0x0f 0x7f - movq Qq, Pq */
4481FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4482{
4483 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4485 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4486 {
4487 /*
4488 * Register, register.
4489 */
4490 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4491 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4493 IEM_MC_BEGIN(0, 1);
4494 IEM_MC_LOCAL(uint64_t, u64Tmp);
4495 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4496 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4497 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4498 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4499 IEM_MC_ADVANCE_RIP();
4500 IEM_MC_END();
4501 }
4502 else
4503 {
4504 /*
4505 * Memory, register.
4506 */
4507 IEM_MC_BEGIN(0, 2);
4508 IEM_MC_LOCAL(uint64_t, u64Tmp);
4509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4510
4511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4513 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4514 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4515
4516 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4517 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4518
4519 IEM_MC_ADVANCE_RIP();
4520 IEM_MC_END();
4521 }
4522 return VINF_SUCCESS;
4523}
4524
4525/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4526FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4527{
4528 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4530 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4531 {
4532 /*
4533 * Register, register.
4534 */
4535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4536 IEM_MC_BEGIN(0, 0);
4537 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4538 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4539 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4540 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4541 IEM_MC_ADVANCE_RIP();
4542 IEM_MC_END();
4543 }
4544 else
4545 {
4546 /*
4547 * Register, memory.
4548 */
4549 IEM_MC_BEGIN(0, 2);
4550 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4552
4553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4557
4558 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4559 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4560
4561 IEM_MC_ADVANCE_RIP();
4562 IEM_MC_END();
4563 }
4564 return VINF_SUCCESS;
4565}
4566
4567/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4568FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4569{
4570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4571 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4573 {
4574 /*
4575 * Register, register.
4576 */
4577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4578 IEM_MC_BEGIN(0, 0);
4579 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4580 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4581 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4582 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4583 IEM_MC_ADVANCE_RIP();
4584 IEM_MC_END();
4585 }
4586 else
4587 {
4588 /*
4589 * Register, memory.
4590 */
4591 IEM_MC_BEGIN(0, 2);
4592 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4594
4595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4597 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4598 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4599
4600 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4601 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4602
4603 IEM_MC_ADVANCE_RIP();
4604 IEM_MC_END();
4605 }
4606 return VINF_SUCCESS;
4607}
4608
4609/* Opcode 0xf2 0x0f 0x7f - invalid */
4610
4611
4612
4613/** Opcode 0x0f 0x80. */
4614FNIEMOP_DEF(iemOp_jo_Jv)
4615{
4616 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4617 IEMOP_HLP_MIN_386();
4618 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4619 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4620 {
4621 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4623
4624 IEM_MC_BEGIN(0, 0);
4625 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4626 IEM_MC_REL_JMP_S16(i16Imm);
4627 } IEM_MC_ELSE() {
4628 IEM_MC_ADVANCE_RIP();
4629 } IEM_MC_ENDIF();
4630 IEM_MC_END();
4631 }
4632 else
4633 {
4634 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4636
4637 IEM_MC_BEGIN(0, 0);
4638 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4639 IEM_MC_REL_JMP_S32(i32Imm);
4640 } IEM_MC_ELSE() {
4641 IEM_MC_ADVANCE_RIP();
4642 } IEM_MC_ENDIF();
4643 IEM_MC_END();
4644 }
4645 return VINF_SUCCESS;
4646}
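/* Note: pattern shared by all the 0x0f 0x80..0x8f Jcc decoders below.
   Assuming the usual IEMOP_HLP_DEFAULT_64BIT_OP_SIZE semantics, the
   effective operand size in 64-bit mode is 64-bit, so the 'else' branch
   covers both rel32 in 32-bit code and the sign-extended rel32 added to
   RIP in 64-bit code; only a 0x66 prefix selects the 16-bit displacement
   path. */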
4647
4648
4649/** Opcode 0x0f 0x81. */
4650FNIEMOP_DEF(iemOp_jno_Jv)
4651{
4652 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4653 IEMOP_HLP_MIN_386();
4654 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4655 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4656 {
4657 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4659
4660 IEM_MC_BEGIN(0, 0);
4661 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4662 IEM_MC_ADVANCE_RIP();
4663 } IEM_MC_ELSE() {
4664 IEM_MC_REL_JMP_S16(i16Imm);
4665 } IEM_MC_ENDIF();
4666 IEM_MC_END();
4667 }
4668 else
4669 {
4670 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672
4673 IEM_MC_BEGIN(0, 0);
4674 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4675 IEM_MC_ADVANCE_RIP();
4676 } IEM_MC_ELSE() {
4677 IEM_MC_REL_JMP_S32(i32Imm);
4678 } IEM_MC_ENDIF();
4679 IEM_MC_END();
4680 }
4681 return VINF_SUCCESS;
4682}
4683
4684
4685/** Opcode 0x0f 0x82. */
4686FNIEMOP_DEF(iemOp_jc_Jv)
4687{
4688 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4689 IEMOP_HLP_MIN_386();
4690 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4691 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4692 {
4693 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4695
4696 IEM_MC_BEGIN(0, 0);
4697 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4698 IEM_MC_REL_JMP_S16(i16Imm);
4699 } IEM_MC_ELSE() {
4700 IEM_MC_ADVANCE_RIP();
4701 } IEM_MC_ENDIF();
4702 IEM_MC_END();
4703 }
4704 else
4705 {
4706 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4708
4709 IEM_MC_BEGIN(0, 0);
4710 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4711 IEM_MC_REL_JMP_S32(i32Imm);
4712 } IEM_MC_ELSE() {
4713 IEM_MC_ADVANCE_RIP();
4714 } IEM_MC_ENDIF();
4715 IEM_MC_END();
4716 }
4717 return VINF_SUCCESS;
4718}
4719
4720
4721/** Opcode 0x0f 0x83. */
4722FNIEMOP_DEF(iemOp_jnc_Jv)
4723{
4724 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4725 IEMOP_HLP_MIN_386();
4726 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4727 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4728 {
4729 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4731
4732 IEM_MC_BEGIN(0, 0);
4733 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4734 IEM_MC_ADVANCE_RIP();
4735 } IEM_MC_ELSE() {
4736 IEM_MC_REL_JMP_S16(i16Imm);
4737 } IEM_MC_ENDIF();
4738 IEM_MC_END();
4739 }
4740 else
4741 {
4742 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4744
4745 IEM_MC_BEGIN(0, 0);
4746 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4747 IEM_MC_ADVANCE_RIP();
4748 } IEM_MC_ELSE() {
4749 IEM_MC_REL_JMP_S32(i32Imm);
4750 } IEM_MC_ENDIF();
4751 IEM_MC_END();
4752 }
4753 return VINF_SUCCESS;
4754}
4755
4756
4757/** Opcode 0x0f 0x84. */
4758FNIEMOP_DEF(iemOp_je_Jv)
4759{
4760 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4761 IEMOP_HLP_MIN_386();
4762 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4763 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4764 {
4765 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4767
4768 IEM_MC_BEGIN(0, 0);
4769 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4770 IEM_MC_REL_JMP_S16(i16Imm);
4771 } IEM_MC_ELSE() {
4772 IEM_MC_ADVANCE_RIP();
4773 } IEM_MC_ENDIF();
4774 IEM_MC_END();
4775 }
4776 else
4777 {
4778 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4780
4781 IEM_MC_BEGIN(0, 0);
4782 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4783 IEM_MC_REL_JMP_S32(i32Imm);
4784 } IEM_MC_ELSE() {
4785 IEM_MC_ADVANCE_RIP();
4786 } IEM_MC_ENDIF();
4787 IEM_MC_END();
4788 }
4789 return VINF_SUCCESS;
4790}
4791
4792
4793/** Opcode 0x0f 0x85. */
4794FNIEMOP_DEF(iemOp_jne_Jv)
4795{
4796 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4797 IEMOP_HLP_MIN_386();
4798 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4799 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4800 {
4801 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4803
4804 IEM_MC_BEGIN(0, 0);
4805 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4806 IEM_MC_ADVANCE_RIP();
4807 } IEM_MC_ELSE() {
4808 IEM_MC_REL_JMP_S16(i16Imm);
4809 } IEM_MC_ENDIF();
4810 IEM_MC_END();
4811 }
4812 else
4813 {
4814 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4816
4817 IEM_MC_BEGIN(0, 0);
4818 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4819 IEM_MC_ADVANCE_RIP();
4820 } IEM_MC_ELSE() {
4821 IEM_MC_REL_JMP_S32(i32Imm);
4822 } IEM_MC_ENDIF();
4823 IEM_MC_END();
4824 }
4825 return VINF_SUCCESS;
4826}
4827
4828
4829/** Opcode 0x0f 0x86. */
4830FNIEMOP_DEF(iemOp_jbe_Jv)
4831{
4832 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4833 IEMOP_HLP_MIN_386();
4834 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4835 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4836 {
4837 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4839
4840 IEM_MC_BEGIN(0, 0);
4841 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4842 IEM_MC_REL_JMP_S16(i16Imm);
4843 } IEM_MC_ELSE() {
4844 IEM_MC_ADVANCE_RIP();
4845 } IEM_MC_ENDIF();
4846 IEM_MC_END();
4847 }
4848 else
4849 {
4850 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4852
4853 IEM_MC_BEGIN(0, 0);
4854 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4855 IEM_MC_REL_JMP_S32(i32Imm);
4856 } IEM_MC_ELSE() {
4857 IEM_MC_ADVANCE_RIP();
4858 } IEM_MC_ENDIF();
4859 IEM_MC_END();
4860 }
4861 return VINF_SUCCESS;
4862}
4863
4864
4865/** Opcode 0x0f 0x87. */
4866FNIEMOP_DEF(iemOp_jnbe_Jv)
4867{
4868 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4869 IEMOP_HLP_MIN_386();
4870 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4871 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4872 {
4873 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4875
4876 IEM_MC_BEGIN(0, 0);
4877 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4878 IEM_MC_ADVANCE_RIP();
4879 } IEM_MC_ELSE() {
4880 IEM_MC_REL_JMP_S16(i16Imm);
4881 } IEM_MC_ENDIF();
4882 IEM_MC_END();
4883 }
4884 else
4885 {
4886 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4888
4889 IEM_MC_BEGIN(0, 0);
4890 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4891 IEM_MC_ADVANCE_RIP();
4892 } IEM_MC_ELSE() {
4893 IEM_MC_REL_JMP_S32(i32Imm);
4894 } IEM_MC_ENDIF();
4895 IEM_MC_END();
4896 }
4897 return VINF_SUCCESS;
4898}
4899
4900
4901/** Opcode 0x0f 0x88. */
4902FNIEMOP_DEF(iemOp_js_Jv)
4903{
4904 IEMOP_MNEMONIC(js_Jv, "js Jv");
4905 IEMOP_HLP_MIN_386();
4906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4907 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4908 {
4909 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4911
4912 IEM_MC_BEGIN(0, 0);
4913 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4914 IEM_MC_REL_JMP_S16(i16Imm);
4915 } IEM_MC_ELSE() {
4916 IEM_MC_ADVANCE_RIP();
4917 } IEM_MC_ENDIF();
4918 IEM_MC_END();
4919 }
4920 else
4921 {
4922 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4924
4925 IEM_MC_BEGIN(0, 0);
4926 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4927 IEM_MC_REL_JMP_S32(i32Imm);
4928 } IEM_MC_ELSE() {
4929 IEM_MC_ADVANCE_RIP();
4930 } IEM_MC_ENDIF();
4931 IEM_MC_END();
4932 }
4933 return VINF_SUCCESS;
4934}
4935
4936
4937/** Opcode 0x0f 0x89. */
4938FNIEMOP_DEF(iemOp_jns_Jv)
4939{
4940 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4941 IEMOP_HLP_MIN_386();
4942 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4943 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4944 {
4945 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4947
4948 IEM_MC_BEGIN(0, 0);
4949 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4950 IEM_MC_ADVANCE_RIP();
4951 } IEM_MC_ELSE() {
4952 IEM_MC_REL_JMP_S16(i16Imm);
4953 } IEM_MC_ENDIF();
4954 IEM_MC_END();
4955 }
4956 else
4957 {
4958 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4960
4961 IEM_MC_BEGIN(0, 0);
4962 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4963 IEM_MC_ADVANCE_RIP();
4964 } IEM_MC_ELSE() {
4965 IEM_MC_REL_JMP_S32(i32Imm);
4966 } IEM_MC_ENDIF();
4967 IEM_MC_END();
4968 }
4969 return VINF_SUCCESS;
4970}
4971
4972
4973/** Opcode 0x0f 0x8a. */
4974FNIEMOP_DEF(iemOp_jp_Jv)
4975{
4976 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4977 IEMOP_HLP_MIN_386();
4978 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4979 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4980 {
4981 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4983
4984 IEM_MC_BEGIN(0, 0);
4985 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4986 IEM_MC_REL_JMP_S16(i16Imm);
4987 } IEM_MC_ELSE() {
4988 IEM_MC_ADVANCE_RIP();
4989 } IEM_MC_ENDIF();
4990 IEM_MC_END();
4991 }
4992 else
4993 {
4994 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4996
4997 IEM_MC_BEGIN(0, 0);
4998 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4999 IEM_MC_REL_JMP_S32(i32Imm);
5000 } IEM_MC_ELSE() {
5001 IEM_MC_ADVANCE_RIP();
5002 } IEM_MC_ENDIF();
5003 IEM_MC_END();
5004 }
5005 return VINF_SUCCESS;
5006}
5007
5008
5009/** Opcode 0x0f 0x8b. */
5010FNIEMOP_DEF(iemOp_jnp_Jv)
5011{
5012 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5013 IEMOP_HLP_MIN_386();
5014 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5015 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5016 {
5017 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019
5020 IEM_MC_BEGIN(0, 0);
5021 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5022 IEM_MC_ADVANCE_RIP();
5023 } IEM_MC_ELSE() {
5024 IEM_MC_REL_JMP_S16(i16Imm);
5025 } IEM_MC_ENDIF();
5026 IEM_MC_END();
5027 }
5028 else
5029 {
5030 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5032
5033 IEM_MC_BEGIN(0, 0);
5034 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5035 IEM_MC_ADVANCE_RIP();
5036 } IEM_MC_ELSE() {
5037 IEM_MC_REL_JMP_S32(i32Imm);
5038 } IEM_MC_ENDIF();
5039 IEM_MC_END();
5040 }
5041 return VINF_SUCCESS;
5042}
5043
5044
5045/** Opcode 0x0f 0x8c. */
5046FNIEMOP_DEF(iemOp_jl_Jv)
5047{
5048 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5049 IEMOP_HLP_MIN_386();
5050 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5051 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5052 {
5053 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5055
5056 IEM_MC_BEGIN(0, 0);
5057 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5058 IEM_MC_REL_JMP_S16(i16Imm);
5059 } IEM_MC_ELSE() {
5060 IEM_MC_ADVANCE_RIP();
5061 } IEM_MC_ENDIF();
5062 IEM_MC_END();
5063 }
5064 else
5065 {
5066 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5068
5069 IEM_MC_BEGIN(0, 0);
5070 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5071 IEM_MC_REL_JMP_S32(i32Imm);
5072 } IEM_MC_ELSE() {
5073 IEM_MC_ADVANCE_RIP();
5074 } IEM_MC_ENDIF();
5075 IEM_MC_END();
5076 }
5077 return VINF_SUCCESS;
5078}
5079
5080
5081/** Opcode 0x0f 0x8d. */
5082FNIEMOP_DEF(iemOp_jnl_Jv)
5083{
5084 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5085 IEMOP_HLP_MIN_386();
5086 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5087 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5088 {
5089 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5091
5092 IEM_MC_BEGIN(0, 0);
5093 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5094 IEM_MC_ADVANCE_RIP();
5095 } IEM_MC_ELSE() {
5096 IEM_MC_REL_JMP_S16(i16Imm);
5097 } IEM_MC_ENDIF();
5098 IEM_MC_END();
5099 }
5100 else
5101 {
5102 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5104
5105 IEM_MC_BEGIN(0, 0);
5106 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5107 IEM_MC_ADVANCE_RIP();
5108 } IEM_MC_ELSE() {
5109 IEM_MC_REL_JMP_S32(i32Imm);
5110 } IEM_MC_ENDIF();
5111 IEM_MC_END();
5112 }
5113 return VINF_SUCCESS;
5114}
5115
5116
5117/** Opcode 0x0f 0x8e. */
5118FNIEMOP_DEF(iemOp_jle_Jv)
5119{
5120 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5121 IEMOP_HLP_MIN_386();
5122 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5123 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5124 {
5125 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5127
5128 IEM_MC_BEGIN(0, 0);
5129 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5130 IEM_MC_REL_JMP_S16(i16Imm);
5131 } IEM_MC_ELSE() {
5132 IEM_MC_ADVANCE_RIP();
5133 } IEM_MC_ENDIF();
5134 IEM_MC_END();
5135 }
5136 else
5137 {
5138 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5140
5141 IEM_MC_BEGIN(0, 0);
5142 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5143 IEM_MC_REL_JMP_S32(i32Imm);
5144 } IEM_MC_ELSE() {
5145 IEM_MC_ADVANCE_RIP();
5146 } IEM_MC_ENDIF();
5147 IEM_MC_END();
5148 }
5149 return VINF_SUCCESS;
5150}
5151
5152
5153/** Opcode 0x0f 0x8f. */
5154FNIEMOP_DEF(iemOp_jnle_Jv)
5155{
5156 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5157 IEMOP_HLP_MIN_386();
5158 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5159 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5160 {
5161 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5163
5164 IEM_MC_BEGIN(0, 0);
5165 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5166 IEM_MC_ADVANCE_RIP();
5167 } IEM_MC_ELSE() {
5168 IEM_MC_REL_JMP_S16(i16Imm);
5169 } IEM_MC_ENDIF();
5170 IEM_MC_END();
5171 }
5172 else
5173 {
5174 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5176
5177 IEM_MC_BEGIN(0, 0);
5178 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5179 IEM_MC_ADVANCE_RIP();
5180 } IEM_MC_ELSE() {
5181 IEM_MC_REL_JMP_S32(i32Imm);
5182 } IEM_MC_ENDIF();
5183 IEM_MC_END();
5184 }
5185 return VINF_SUCCESS;
5186}
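/* Note: the signed condition tests above follow the standard x86
   semantics: jl/jnge is SF != OF, jnl/jge is SF == OF, jle/jng is
   ZF || SF != OF, and jnle/jg is !ZF && SF == OF, mapped directly onto
   the IEM_MC_IF_EFL_BITS_NE and IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE
   conditionals. */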
5187
5188
5189/** Opcode 0x0f 0x90. */
5190FNIEMOP_DEF(iemOp_seto_Eb)
5191{
5192 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5193 IEMOP_HLP_MIN_386();
5194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5195
5196 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5197 * any way. AMD says it's "unused", whatever that means. We're
5198 * ignoring for now. */
5199 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5200 {
5201 /* register target */
5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5203 IEM_MC_BEGIN(0, 0);
5204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5205 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5206 } IEM_MC_ELSE() {
5207 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5208 } IEM_MC_ENDIF();
5209 IEM_MC_ADVANCE_RIP();
5210 IEM_MC_END();
5211 }
5212 else
5213 {
5214 /* memory target */
5215 IEM_MC_BEGIN(0, 1);
5216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5219 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5220 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5221 } IEM_MC_ELSE() {
5222 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5223 } IEM_MC_ENDIF();
5224 IEM_MC_ADVANCE_RIP();
5225 IEM_MC_END();
5226 }
5227 return VINF_SUCCESS;
5228}
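/* Note: pattern shared by the 0x0f 0x90..0x9f SETcc decoders: the
   destination byte is always written, 1 when the condition holds and 0
   otherwise, and the mod r/m 'reg' field plays no part in operand
   selection (see the encoding @todo above). */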
5229
5230
5231/** Opcode 0x0f 0x91. */
5232FNIEMOP_DEF(iemOp_setno_Eb)
5233{
5234 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5235 IEMOP_HLP_MIN_386();
5236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5237
5238 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5239 * any way. AMD says it's "unused", whatever that means. We're
5240 * ignoring for now. */
5241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5242 {
5243 /* register target */
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5245 IEM_MC_BEGIN(0, 0);
5246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5247 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5248 } IEM_MC_ELSE() {
5249 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5250 } IEM_MC_ENDIF();
5251 IEM_MC_ADVANCE_RIP();
5252 IEM_MC_END();
5253 }
5254 else
5255 {
5256 /* memory target */
5257 IEM_MC_BEGIN(0, 1);
5258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5261 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5262 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5263 } IEM_MC_ELSE() {
5264 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5265 } IEM_MC_ENDIF();
5266 IEM_MC_ADVANCE_RIP();
5267 IEM_MC_END();
5268 }
5269 return VINF_SUCCESS;
5270}
5271
5272
5273/** Opcode 0x0f 0x92. */
5274FNIEMOP_DEF(iemOp_setc_Eb)
5275{
5276 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5277 IEMOP_HLP_MIN_386();
5278 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5279
5280 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5281 * any way. AMD says it's "unused", whatever that means. We're
5282 * ignoring for now. */
5283 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5284 {
5285 /* register target */
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5287 IEM_MC_BEGIN(0, 0);
5288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5289 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5290 } IEM_MC_ELSE() {
5291 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5292 } IEM_MC_ENDIF();
5293 IEM_MC_ADVANCE_RIP();
5294 IEM_MC_END();
5295 }
5296 else
5297 {
5298 /* memory target */
5299 IEM_MC_BEGIN(0, 1);
5300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5304 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5305 } IEM_MC_ELSE() {
5306 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5307 } IEM_MC_ENDIF();
5308 IEM_MC_ADVANCE_RIP();
5309 IEM_MC_END();
5310 }
5311 return VINF_SUCCESS;
5312}
5313
5314
5315/** Opcode 0x0f 0x93. */
5316FNIEMOP_DEF(iemOp_setnc_Eb)
5317{
5318 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5319 IEMOP_HLP_MIN_386();
5320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5321
5322 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5323 * any way. AMD says it's "unused", whatever that means. We're
5324 * ignoring for now. */
5325 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5326 {
5327 /* register target */
5328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5329 IEM_MC_BEGIN(0, 0);
5330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5331 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5332 } IEM_MC_ELSE() {
5333 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5334 } IEM_MC_ENDIF();
5335 IEM_MC_ADVANCE_RIP();
5336 IEM_MC_END();
5337 }
5338 else
5339 {
5340 /* memory target */
5341 IEM_MC_BEGIN(0, 1);
5342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5345 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5346 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5347 } IEM_MC_ELSE() {
5348 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5349 } IEM_MC_ENDIF();
5350 IEM_MC_ADVANCE_RIP();
5351 IEM_MC_END();
5352 }
5353 return VINF_SUCCESS;
5354}
5355
5356
5357/** Opcode 0x0f 0x94. */
5358FNIEMOP_DEF(iemOp_sete_Eb)
5359{
5360 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5361 IEMOP_HLP_MIN_386();
5362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5363
5364 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5365 * any way. AMD says it's "unused", whatever that means. We're
5366 * ignoring for now. */
5367 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5368 {
5369 /* register target */
5370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5371 IEM_MC_BEGIN(0, 0);
5372 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5373 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5374 } IEM_MC_ELSE() {
5375 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5376 } IEM_MC_ENDIF();
5377 IEM_MC_ADVANCE_RIP();
5378 IEM_MC_END();
5379 }
5380 else
5381 {
5382 /* memory target */
5383 IEM_MC_BEGIN(0, 1);
5384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5387 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5388 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5389 } IEM_MC_ELSE() {
5390 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5391 } IEM_MC_ENDIF();
5392 IEM_MC_ADVANCE_RIP();
5393 IEM_MC_END();
5394 }
5395 return VINF_SUCCESS;
5396}
5397
5398
5399/** Opcode 0x0f 0x95. */
5400FNIEMOP_DEF(iemOp_setne_Eb)
5401{
5402 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5403 IEMOP_HLP_MIN_386();
5404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5405
5406 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5407 * any way. AMD says it's "unused", whatever that means. We're
5408 * ignoring for now. */
5409 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5410 {
5411 /* register target */
5412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5413 IEM_MC_BEGIN(0, 0);
5414 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5415 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5416 } IEM_MC_ELSE() {
5417 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5418 } IEM_MC_ENDIF();
5419 IEM_MC_ADVANCE_RIP();
5420 IEM_MC_END();
5421 }
5422 else
5423 {
5424 /* memory target */
5425 IEM_MC_BEGIN(0, 1);
5426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5430 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5431 } IEM_MC_ELSE() {
5432 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5433 } IEM_MC_ENDIF();
5434 IEM_MC_ADVANCE_RIP();
5435 IEM_MC_END();
5436 }
5437 return VINF_SUCCESS;
5438}
5439
5440
5441/** Opcode 0x0f 0x96. */
5442FNIEMOP_DEF(iemOp_setbe_Eb)
5443{
5444 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5445 IEMOP_HLP_MIN_386();
5446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5447
5448 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5449 * any way. AMD says it's "unused", whatever that means. We're
5450 * ignoring for now. */
5451 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5452 {
5453 /* register target */
5454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5455 IEM_MC_BEGIN(0, 0);
5456 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5457 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5458 } IEM_MC_ELSE() {
5459 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5460 } IEM_MC_ENDIF();
5461 IEM_MC_ADVANCE_RIP();
5462 IEM_MC_END();
5463 }
5464 else
5465 {
5466 /* memory target */
5467 IEM_MC_BEGIN(0, 1);
5468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5471 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5472 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5473 } IEM_MC_ELSE() {
5474 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5475 } IEM_MC_ENDIF();
5476 IEM_MC_ADVANCE_RIP();
5477 IEM_MC_END();
5478 }
5479 return VINF_SUCCESS;
5480}
5481
5482
5483/** Opcode 0x0f 0x97. */
5484FNIEMOP_DEF(iemOp_setnbe_Eb)
5485{
5486 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5487 IEMOP_HLP_MIN_386();
5488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5489
5490 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5491 * any way. AMD says it's "unused", whatever that means. We're
5492 * ignoring for now. */
5493 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5494 {
5495 /* register target */
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 IEM_MC_BEGIN(0, 0);
5498 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5499 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5500 } IEM_MC_ELSE() {
5501 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5502 } IEM_MC_ENDIF();
5503 IEM_MC_ADVANCE_RIP();
5504 IEM_MC_END();
5505 }
5506 else
5507 {
5508 /* memory target */
5509 IEM_MC_BEGIN(0, 1);
5510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5513 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5514 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5515 } IEM_MC_ELSE() {
5516 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5517 } IEM_MC_ENDIF();
5518 IEM_MC_ADVANCE_RIP();
5519 IEM_MC_END();
5520 }
5521 return VINF_SUCCESS;
5522}
5523
5524
5525/** Opcode 0x0f 0x98. */
5526FNIEMOP_DEF(iemOp_sets_Eb)
5527{
5528 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5529 IEMOP_HLP_MIN_386();
5530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5531
5532 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5533 * any way. AMD says it's "unused", whatever that means. We're
5534 * ignoring for now. */
5535 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5536 {
5537 /* register target */
5538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5539 IEM_MC_BEGIN(0, 0);
5540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5541 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5542 } IEM_MC_ELSE() {
5543 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5544 } IEM_MC_ENDIF();
5545 IEM_MC_ADVANCE_RIP();
5546 IEM_MC_END();
5547 }
5548 else
5549 {
5550 /* memory target */
5551 IEM_MC_BEGIN(0, 1);
5552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5556 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5557 } IEM_MC_ELSE() {
5558 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5559 } IEM_MC_ENDIF();
5560 IEM_MC_ADVANCE_RIP();
5561 IEM_MC_END();
5562 }
5563 return VINF_SUCCESS;
5564}
5565
5566
5567/** Opcode 0x0f 0x99. */
5568FNIEMOP_DEF(iemOp_setns_Eb)
5569{
5570 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5571 IEMOP_HLP_MIN_386();
5572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5573
5574 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5575 * any way. AMD says it's "unused", whatever that means. We're
5576 * ignoring for now. */
5577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5578 {
5579 /* register target */
5580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5581 IEM_MC_BEGIN(0, 0);
5582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5583 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5584 } IEM_MC_ELSE() {
5585 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5586 } IEM_MC_ENDIF();
5587 IEM_MC_ADVANCE_RIP();
5588 IEM_MC_END();
5589 }
5590 else
5591 {
5592 /* memory target */
5593 IEM_MC_BEGIN(0, 1);
5594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5598 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5599 } IEM_MC_ELSE() {
5600 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5601 } IEM_MC_ENDIF();
5602 IEM_MC_ADVANCE_RIP();
5603 IEM_MC_END();
5604 }
5605 return VINF_SUCCESS;
5606}
5607
5608
5609/** Opcode 0x0f 0x9a. */
5610FNIEMOP_DEF(iemOp_setp_Eb)
5611{
5612 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5613 IEMOP_HLP_MIN_386();
5614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5615
5616 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5617 * any way. AMD says it's "unused", whatever that means. We're
5618 * ignoring for now. */
5619 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5620 {
5621 /* register target */
5622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5623 IEM_MC_BEGIN(0, 0);
5624 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5625 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5626 } IEM_MC_ELSE() {
5627 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5628 } IEM_MC_ENDIF();
5629 IEM_MC_ADVANCE_RIP();
5630 IEM_MC_END();
5631 }
5632 else
5633 {
5634 /* memory target */
5635 IEM_MC_BEGIN(0, 1);
5636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5640 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5641 } IEM_MC_ELSE() {
5642 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5643 } IEM_MC_ENDIF();
5644 IEM_MC_ADVANCE_RIP();
5645 IEM_MC_END();
5646 }
5647 return VINF_SUCCESS;
5648}
5649
5650
5651/** Opcode 0x0f 0x9b. */
5652FNIEMOP_DEF(iemOp_setnp_Eb)
5653{
5654 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5655 IEMOP_HLP_MIN_386();
5656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5657
5658 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5659 * any way. AMD says it's "unused", whatever that means. We're
5660 * ignoring for now. */
5661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5662 {
5663 /* register target */
5664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5665 IEM_MC_BEGIN(0, 0);
5666 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5667 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5668 } IEM_MC_ELSE() {
5669 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5670 } IEM_MC_ENDIF();
5671 IEM_MC_ADVANCE_RIP();
5672 IEM_MC_END();
5673 }
5674 else
5675 {
5676 /* memory target */
5677 IEM_MC_BEGIN(0, 1);
5678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5681 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5682 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5683 } IEM_MC_ELSE() {
5684 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5685 } IEM_MC_ENDIF();
5686 IEM_MC_ADVANCE_RIP();
5687 IEM_MC_END();
5688 }
5689 return VINF_SUCCESS;
5690}
5691
5692
5693/** Opcode 0x0f 0x9c. */
5694FNIEMOP_DEF(iemOp_setl_Eb)
5695{
5696 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5697 IEMOP_HLP_MIN_386();
5698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5699
5700 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5701 * any way. AMD says it's "unused", whatever that means. We're
5702 * ignoring for now. */
5703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5704 {
5705 /* register target */
5706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5707 IEM_MC_BEGIN(0, 0);
5708 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5709 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5710 } IEM_MC_ELSE() {
5711 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5712 } IEM_MC_ENDIF();
5713 IEM_MC_ADVANCE_RIP();
5714 IEM_MC_END();
5715 }
5716 else
5717 {
5718 /* memory target */
5719 IEM_MC_BEGIN(0, 1);
5720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5723 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5724 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5725 } IEM_MC_ELSE() {
5726 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5727 } IEM_MC_ENDIF();
5728 IEM_MC_ADVANCE_RIP();
5729 IEM_MC_END();
5730 }
5731 return VINF_SUCCESS;
5732}
5733
5734
5735/** Opcode 0x0f 0x9d. */
5736FNIEMOP_DEF(iemOp_setnl_Eb)
5737{
5738 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5739 IEMOP_HLP_MIN_386();
5740 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5741
5742 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5743 * any way. AMD says it's "unused", whatever that means. We're
5744 * ignoring for now. */
5745 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5746 {
5747 /* register target */
5748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5749 IEM_MC_BEGIN(0, 0);
5750 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5751 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5752 } IEM_MC_ELSE() {
5753 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5754 } IEM_MC_ENDIF();
5755 IEM_MC_ADVANCE_RIP();
5756 IEM_MC_END();
5757 }
5758 else
5759 {
5760 /* memory target */
5761 IEM_MC_BEGIN(0, 1);
5762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5765 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5766 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5767 } IEM_MC_ELSE() {
5768 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5769 } IEM_MC_ENDIF();
5770 IEM_MC_ADVANCE_RIP();
5771 IEM_MC_END();
5772 }
5773 return VINF_SUCCESS;
5774}
5775
5776
5777/** Opcode 0x0f 0x9e. */
5778FNIEMOP_DEF(iemOp_setle_Eb)
5779{
5780 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5781 IEMOP_HLP_MIN_386();
5782 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5783
5784 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5785 * any way. AMD says it's "unused", whatever that means. We're
5786 * ignoring for now. */
5787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5788 {
5789 /* register target */
5790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5791 IEM_MC_BEGIN(0, 0);
5792 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5793 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5794 } IEM_MC_ELSE() {
5795 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5796 } IEM_MC_ENDIF();
5797 IEM_MC_ADVANCE_RIP();
5798 IEM_MC_END();
5799 }
5800 else
5801 {
5802 /* memory target */
5803 IEM_MC_BEGIN(0, 1);
5804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5807 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5808 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5809 } IEM_MC_ELSE() {
5810 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5811 } IEM_MC_ENDIF();
5812 IEM_MC_ADVANCE_RIP();
5813 IEM_MC_END();
5814 }
5815 return VINF_SUCCESS;
5816}
5817
5818
5819/** Opcode 0x0f 0x9f. */
5820FNIEMOP_DEF(iemOp_setnle_Eb)
5821{
5822 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5823 IEMOP_HLP_MIN_386();
5824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5825
5826 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5827 * any way. AMD says it's "unused", whatever that means. We're
5828 * ignoring for now. */
5829 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5830 {
5831 /* register target */
5832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5833 IEM_MC_BEGIN(0, 0);
5834 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5835 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5836 } IEM_MC_ELSE() {
5837 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5838 } IEM_MC_ENDIF();
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 }
5842 else
5843 {
5844 /* memory target */
5845 IEM_MC_BEGIN(0, 1);
5846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5849 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5850 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5851 } IEM_MC_ELSE() {
5852 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5853 } IEM_MC_ENDIF();
5854 IEM_MC_ADVANCE_RIP();
5855 IEM_MC_END();
5856 }
5857 return VINF_SUCCESS;
5858}
5859
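/*
 * Note! The SETcc bodies above and below all follow the same pattern: test an
 *       EFLAGS condition and store the constant 1 or 0 into the 8-bit register
 *       or memory byte selected by ModR/M.  Worked example for setle (0f 9e)
 *       vs. setnle (0f 9f): after 'cmp eax, ebx' with eax=-1, ebx=1 we get
 *       SF=1, OF=0, ZF=0, so SF != OF ('less') and setle stores 1 while
 *       setnle stores 0.
 */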
5860
5861/**
5862 * Common 'push segment-register' helper.
5863 */
5864FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5865{
5866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5867 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5868 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5869
5870 switch (pVCpu->iem.s.enmEffOpSize)
5871 {
5872 case IEMMODE_16BIT:
5873 IEM_MC_BEGIN(0, 1);
5874 IEM_MC_LOCAL(uint16_t, u16Value);
5875 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5876 IEM_MC_PUSH_U16(u16Value);
5877 IEM_MC_ADVANCE_RIP();
5878 IEM_MC_END();
5879 break;
5880
5881 case IEMMODE_32BIT:
5882 IEM_MC_BEGIN(0, 1);
5883 IEM_MC_LOCAL(uint32_t, u32Value);
5884 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5885 IEM_MC_PUSH_U32_SREG(u32Value);
5886 IEM_MC_ADVANCE_RIP();
5887 IEM_MC_END();
5888 break;
5889
5890 case IEMMODE_64BIT:
5891 IEM_MC_BEGIN(0, 1);
5892 IEM_MC_LOCAL(uint64_t, u64Value);
5893 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5894 IEM_MC_PUSH_U64(u64Value);
5895 IEM_MC_ADVANCE_RIP();
5896 IEM_MC_END();
5897 break;
5898 }
5899
5900 return VINF_SUCCESS;
5901}
5902
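/*
 * Note! The 32-bit case above uses IEM_MC_PUSH_U32_SREG rather than the plain
 *       IEM_MC_PUSH_U32.  The macro name suggests this exists because real
 *       CPUs, when pushing a segment register with a 32-bit operand size, may
 *       write only the low 16 bits of the stack slot and leave the high half
 *       untouched; that rationale is inferred from the name, not spelled out
 *       in this file.
 */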
5903
5904/** Opcode 0x0f 0xa0. */
5905FNIEMOP_DEF(iemOp_push_fs)
5906{
5907 IEMOP_MNEMONIC(push_fs, "push fs");
5908 IEMOP_HLP_MIN_386();
5909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5910 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5911}
5912
5913
5914/** Opcode 0x0f 0xa1. */
5915FNIEMOP_DEF(iemOp_pop_fs)
5916{
5917 IEMOP_MNEMONIC(pop_fs, "pop fs");
5918 IEMOP_HLP_MIN_386();
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5921}
5922
5923
5924/** Opcode 0x0f 0xa2. */
5925FNIEMOP_DEF(iemOp_cpuid)
5926{
5927 IEMOP_MNEMONIC(cpuid, "cpuid");
 5928 IEMOP_HLP_MIN_486(); /* CPUID is not present on all 486 models. */
5929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5930 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5931}
5932
5933
5934/**
5935 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5936 * iemOp_bts_Ev_Gv.
5937 */
5938FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5939{
5940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5941 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5942
5943 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5944 {
5945 /* register destination. */
5946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5947 switch (pVCpu->iem.s.enmEffOpSize)
5948 {
5949 case IEMMODE_16BIT:
5950 IEM_MC_BEGIN(3, 0);
5951 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5952 IEM_MC_ARG(uint16_t, u16Src, 1);
5953 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5954
5955 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5956 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5957 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5958 IEM_MC_REF_EFLAGS(pEFlags);
5959 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5960
5961 IEM_MC_ADVANCE_RIP();
5962 IEM_MC_END();
5963 return VINF_SUCCESS;
5964
5965 case IEMMODE_32BIT:
5966 IEM_MC_BEGIN(3, 0);
5967 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5968 IEM_MC_ARG(uint32_t, u32Src, 1);
5969 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5970
5971 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5972 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5973 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5974 IEM_MC_REF_EFLAGS(pEFlags);
5975 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5976
5977 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5978 IEM_MC_ADVANCE_RIP();
5979 IEM_MC_END();
5980 return VINF_SUCCESS;
5981
5982 case IEMMODE_64BIT:
5983 IEM_MC_BEGIN(3, 0);
5984 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5985 IEM_MC_ARG(uint64_t, u64Src, 1);
5986 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5987
5988 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5989 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5990 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5991 IEM_MC_REF_EFLAGS(pEFlags);
5992 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5993
5994 IEM_MC_ADVANCE_RIP();
5995 IEM_MC_END();
5996 return VINF_SUCCESS;
5997
5998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5999 }
6000 }
6001 else
6002 {
6003 /* memory destination. */
6004
6005 uint32_t fAccess;
6006 if (pImpl->pfnLockedU16)
6007 fAccess = IEM_ACCESS_DATA_RW;
6008 else /* BT */
6009 fAccess = IEM_ACCESS_DATA_R;
6010
6011 /** @todo test negative bit offsets! */
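        /* Note! For a memory destination the register bit offset is split into
                 a byte address adjustment and a bit index inside the operand.
                 Worked 16-bit example with u16Src = 0xffff (bit offset -1):
                     bit index  = 0xffff & 0x0f              = 15
                     i16AddrAdj = ((int16_t)0xffff >> 4) * 2 = -2  (arithmetic shift)
                 so bit 15 of the word two bytes below GCPtrEffDst is accessed,
                 matching the architectural 'offset div 16, offset mod 16' rule
                 for negative offsets. */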
6012 switch (pVCpu->iem.s.enmEffOpSize)
6013 {
6014 case IEMMODE_16BIT:
6015 IEM_MC_BEGIN(3, 2);
6016 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6017 IEM_MC_ARG(uint16_t, u16Src, 1);
6018 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6020 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6021
6022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6023 if (pImpl->pfnLockedU16)
6024 IEMOP_HLP_DONE_DECODING();
6025 else
6026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6027 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6028 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6029 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6030 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6031 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6032 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6033 IEM_MC_FETCH_EFLAGS(EFlags);
6034
6035 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6036 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6037 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6038 else
6039 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6040 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6041
6042 IEM_MC_COMMIT_EFLAGS(EFlags);
6043 IEM_MC_ADVANCE_RIP();
6044 IEM_MC_END();
6045 return VINF_SUCCESS;
6046
6047 case IEMMODE_32BIT:
6048 IEM_MC_BEGIN(3, 2);
6049 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6050 IEM_MC_ARG(uint32_t, u32Src, 1);
6051 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6053 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6054
6055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6056 if (pImpl->pfnLockedU16)
6057 IEMOP_HLP_DONE_DECODING();
6058 else
6059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6060 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6061 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6062 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6063 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6064 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6065 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6066 IEM_MC_FETCH_EFLAGS(EFlags);
6067
6068 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6069 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6070 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6071 else
6072 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6073 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6074
6075 IEM_MC_COMMIT_EFLAGS(EFlags);
6076 IEM_MC_ADVANCE_RIP();
6077 IEM_MC_END();
6078 return VINF_SUCCESS;
6079
6080 case IEMMODE_64BIT:
6081 IEM_MC_BEGIN(3, 2);
6082 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6083 IEM_MC_ARG(uint64_t, u64Src, 1);
6084 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6086 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6087
6088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6089 if (pImpl->pfnLockedU16)
6090 IEMOP_HLP_DONE_DECODING();
6091 else
6092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6093 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6094 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6095 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6096 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6097 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6098 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6099 IEM_MC_FETCH_EFLAGS(EFlags);
6100
6101 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6102 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6103 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6104 else
6105 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6106 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6107
6108 IEM_MC_COMMIT_EFLAGS(EFlags);
6109 IEM_MC_ADVANCE_RIP();
6110 IEM_MC_END();
6111 return VINF_SUCCESS;
6112
6113 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6114 }
6115 }
6116}
6117
6118
6119/** Opcode 0x0f 0xa3. */
6120FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6121{
6122 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6123 IEMOP_HLP_MIN_386();
6124 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6125}
6126
6127
6128/**
6129 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6130 */
6131FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6132{
6133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6134 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6135
6136 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6137 {
6138 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6140
6141 switch (pVCpu->iem.s.enmEffOpSize)
6142 {
6143 case IEMMODE_16BIT:
6144 IEM_MC_BEGIN(4, 0);
6145 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6146 IEM_MC_ARG(uint16_t, u16Src, 1);
6147 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6148 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6149
6150 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6151 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6152 IEM_MC_REF_EFLAGS(pEFlags);
6153 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6154
6155 IEM_MC_ADVANCE_RIP();
6156 IEM_MC_END();
6157 return VINF_SUCCESS;
6158
6159 case IEMMODE_32BIT:
6160 IEM_MC_BEGIN(4, 0);
6161 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6162 IEM_MC_ARG(uint32_t, u32Src, 1);
6163 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6164 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6165
6166 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6167 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6168 IEM_MC_REF_EFLAGS(pEFlags);
6169 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6170
6171 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6172 IEM_MC_ADVANCE_RIP();
6173 IEM_MC_END();
6174 return VINF_SUCCESS;
6175
6176 case IEMMODE_64BIT:
6177 IEM_MC_BEGIN(4, 0);
6178 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6179 IEM_MC_ARG(uint64_t, u64Src, 1);
6180 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6181 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6182
6183 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6184 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6185 IEM_MC_REF_EFLAGS(pEFlags);
6186 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6187
6188 IEM_MC_ADVANCE_RIP();
6189 IEM_MC_END();
6190 return VINF_SUCCESS;
6191
6192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6193 }
6194 }
6195 else
6196 {
6197 switch (pVCpu->iem.s.enmEffOpSize)
6198 {
6199 case IEMMODE_16BIT:
6200 IEM_MC_BEGIN(4, 2);
6201 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6202 IEM_MC_ARG(uint16_t, u16Src, 1);
6203 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6204 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6206
6207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6208 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6209 IEM_MC_ASSIGN(cShiftArg, cShift);
6210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6211 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6212 IEM_MC_FETCH_EFLAGS(EFlags);
6213 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6214 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6215
6216 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6217 IEM_MC_COMMIT_EFLAGS(EFlags);
6218 IEM_MC_ADVANCE_RIP();
6219 IEM_MC_END();
6220 return VINF_SUCCESS;
6221
6222 case IEMMODE_32BIT:
6223 IEM_MC_BEGIN(4, 2);
6224 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6225 IEM_MC_ARG(uint32_t, u32Src, 1);
6226 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6227 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6229
6230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6231 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6232 IEM_MC_ASSIGN(cShiftArg, cShift);
6233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6234 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6235 IEM_MC_FETCH_EFLAGS(EFlags);
6236 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6237 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6238
6239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6240 IEM_MC_COMMIT_EFLAGS(EFlags);
6241 IEM_MC_ADVANCE_RIP();
6242 IEM_MC_END();
6243 return VINF_SUCCESS;
6244
6245 case IEMMODE_64BIT:
6246 IEM_MC_BEGIN(4, 2);
6247 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6248 IEM_MC_ARG(uint64_t, u64Src, 1);
6249 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6250 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6252
6253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6254 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6255 IEM_MC_ASSIGN(cShiftArg, cShift);
6256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6257 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6258 IEM_MC_FETCH_EFLAGS(EFlags);
6259 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6260 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6261
6262 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6263 IEM_MC_COMMIT_EFLAGS(EFlags);
6264 IEM_MC_ADVANCE_RIP();
6265 IEM_MC_END();
6266 return VINF_SUCCESS;
6267
6268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6269 }
6270 }
6271}
6272
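/*
 * Note! SHLD shifts the destination left by the given count, filling the
 *       vacated low bits from the top of the source; SHRD shifts right,
 *       filling from the bottom of the source.  Worked example with
 *       eax=0x12345678, ebx=0xabcdef00:
 *           shld eax, ebx, 8   ->  eax = 0x345678ab
 *           shrd eax, ebx, 8   ->  eax = 0x00123456
 */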
6273
6274/**
6275 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6276 */
6277FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6278{
6279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6281
6282 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6283 {
6284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6285
6286 switch (pVCpu->iem.s.enmEffOpSize)
6287 {
6288 case IEMMODE_16BIT:
6289 IEM_MC_BEGIN(4, 0);
6290 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6291 IEM_MC_ARG(uint16_t, u16Src, 1);
6292 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6293 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6294
6295 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6296 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6297 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6298 IEM_MC_REF_EFLAGS(pEFlags);
6299 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6300
6301 IEM_MC_ADVANCE_RIP();
6302 IEM_MC_END();
6303 return VINF_SUCCESS;
6304
6305 case IEMMODE_32BIT:
6306 IEM_MC_BEGIN(4, 0);
6307 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6308 IEM_MC_ARG(uint32_t, u32Src, 1);
6309 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6310 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6311
6312 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6313 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6314 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6315 IEM_MC_REF_EFLAGS(pEFlags);
6316 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6317
6318 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 return VINF_SUCCESS;
6322
6323 case IEMMODE_64BIT:
6324 IEM_MC_BEGIN(4, 0);
6325 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6326 IEM_MC_ARG(uint64_t, u64Src, 1);
6327 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6328 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6329
6330 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6331 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6332 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6333 IEM_MC_REF_EFLAGS(pEFlags);
6334 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6335
6336 IEM_MC_ADVANCE_RIP();
6337 IEM_MC_END();
6338 return VINF_SUCCESS;
6339
6340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6341 }
6342 }
6343 else
6344 {
6345 switch (pVCpu->iem.s.enmEffOpSize)
6346 {
6347 case IEMMODE_16BIT:
6348 IEM_MC_BEGIN(4, 2);
6349 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6350 IEM_MC_ARG(uint16_t, u16Src, 1);
6351 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6352 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6354
6355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6357 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6358 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6359 IEM_MC_FETCH_EFLAGS(EFlags);
6360 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6361 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6362
6363 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6364 IEM_MC_COMMIT_EFLAGS(EFlags);
6365 IEM_MC_ADVANCE_RIP();
6366 IEM_MC_END();
6367 return VINF_SUCCESS;
6368
6369 case IEMMODE_32BIT:
6370 IEM_MC_BEGIN(4, 2);
6371 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6372 IEM_MC_ARG(uint32_t, u32Src, 1);
6373 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6374 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6376
6377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6379 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6380 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6381 IEM_MC_FETCH_EFLAGS(EFlags);
6382 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6383 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6384
6385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6386 IEM_MC_COMMIT_EFLAGS(EFlags);
6387 IEM_MC_ADVANCE_RIP();
6388 IEM_MC_END();
6389 return VINF_SUCCESS;
6390
6391 case IEMMODE_64BIT:
6392 IEM_MC_BEGIN(4, 2);
6393 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6394 IEM_MC_ARG(uint64_t, u64Src, 1);
6395 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6396 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6398
6399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6401 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6402 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6403 IEM_MC_FETCH_EFLAGS(EFlags);
6404 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6405 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6406
6407 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6408 IEM_MC_COMMIT_EFLAGS(EFlags);
6409 IEM_MC_ADVANCE_RIP();
6410 IEM_MC_END();
6411 return VINF_SUCCESS;
6412
6413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6414 }
6415 }
6416}
6417
6418
6419
6420/** Opcode 0x0f 0xa4. */
6421FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6422{
6423 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6424 IEMOP_HLP_MIN_386();
6425 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6426}
6427
6428
6429/** Opcode 0x0f 0xa5. */
6430FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6431{
6432 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6433 IEMOP_HLP_MIN_386();
6434 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6435}
6436
6437
6438/** Opcode 0x0f 0xa8. */
6439FNIEMOP_DEF(iemOp_push_gs)
6440{
6441 IEMOP_MNEMONIC(push_gs, "push gs");
6442 IEMOP_HLP_MIN_386();
6443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6444 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6445}
6446
6447
6448/** Opcode 0x0f 0xa9. */
6449FNIEMOP_DEF(iemOp_pop_gs)
6450{
6451 IEMOP_MNEMONIC(pop_gs, "pop gs");
6452 IEMOP_HLP_MIN_386();
6453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6454 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6455}
6456
6457
6458/** Opcode 0x0f 0xaa. */
6459FNIEMOP_DEF(iemOp_rsm)
6460{
6461 IEMOP_MNEMONIC(rsm, "rsm");
6462 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6463 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
 6464 * intercept).  Add IEMOP_HLP_MIN_386() when the regular path is implemented. */
6465 IEMOP_BITCH_ABOUT_STUB();
6466 return IEMOP_RAISE_INVALID_OPCODE();
6467}
6468
 6469
6470
6471
6472/** Opcode 0x0f 0xab. */
6473FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6474{
6475 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6476 IEMOP_HLP_MIN_386();
6477 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6478}
6479
6480
6481/** Opcode 0x0f 0xac. */
6482FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6483{
6484 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6485 IEMOP_HLP_MIN_386();
6486 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6487}
6488
6489
6490/** Opcode 0x0f 0xad. */
6491FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6492{
6493 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6494 IEMOP_HLP_MIN_386();
6495 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6496}
6497
6498
6499/** Opcode 0x0f 0xae mem/0. */
6500FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6501{
6502 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6503 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6504 return IEMOP_RAISE_INVALID_OPCODE();
6505
6506 IEM_MC_BEGIN(3, 1);
6507 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6508 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6509 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6512 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6513 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6514 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6515 IEM_MC_END();
6516 return VINF_SUCCESS;
6517}
6518
6519
6520/** Opcode 0x0f 0xae mem/1. */
6521FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6522{
6523 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6524 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6525 return IEMOP_RAISE_INVALID_OPCODE();
6526
6527 IEM_MC_BEGIN(3, 1);
6528 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6529 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6530 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6534 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6535 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6536 IEM_MC_END();
6537 return VINF_SUCCESS;
6538}
6539
6540
6541/**
6542 * @opmaps grp15
6543 * @opcode !11/2
6544 * @oppfx none
6545 * @opcpuid sse
6546 * @opgroup og_sse_mxcsrsm
6547 * @opxcpttype 5
6548 * @optest op1=0 -> mxcsr=0
6549 * @optest op1=0x2083 -> mxcsr=0x2083
6550 * @optest op1=0xfffffffe -> value.xcpt=0xd
6551 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6552 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6553 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6554 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6555 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6556 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6557 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6558 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6559 */
6560FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6561{
6562 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6563 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6564 return IEMOP_RAISE_INVALID_OPCODE();
6565
6566 IEM_MC_BEGIN(2, 0);
6567 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6568 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 6571 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr loads, i.e. modifies, MXCSR. */
6572 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6573 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6574 IEM_MC_END();
6575 return VINF_SUCCESS;
6576}
6577
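/*
 * Note! In the @optest lines, 'value.xcpt' gives the expected exception
 *       vector: 0x6 = #UD, 0x7 = #NM, 0xd = #GP.  Reading the ldmxcsr tests
 *       above: CR0.TS=1 yields #NM, CR0.EM=1 or CR4.OSFXSR=0 yields #UD, and
 *       a reserved MXCSR bit pattern (0xfffffffe) yields #GP, while CR0.MP
 *       alone does not affect the instruction.
 */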
6578
6579/**
6580 * @opmaps grp15
6581 * @opcode !11/3
6582 * @oppfx none
6583 * @opcpuid sse
6584 * @opgroup og_sse_mxcsrsm
6585 * @opxcpttype 5
6586 * @optest mxcsr=0 -> op1=0
6587 * @optest mxcsr=0x2083 -> op1=0x2083
6588 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6589 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6590 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6591 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6592 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6593 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6594 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6595 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6596 */
6597FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6598{
6599 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6600 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6601 return IEMOP_RAISE_INVALID_OPCODE();
6602
6603 IEM_MC_BEGIN(2, 0);
6604 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6605 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6608 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6609 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6610 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6611 IEM_MC_END();
6612 return VINF_SUCCESS;
6613}
6614
6615
6616/**
6617 * @opmaps grp15
6618 * @opcode !11/4
6619 * @oppfx none
6620 * @opcpuid xsave
6621 * @opgroup og_system
6622 * @opxcpttype none
6623 */
6624FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6625{
6626 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6627 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6628 return IEMOP_RAISE_INVALID_OPCODE();
6629
6630 IEM_MC_BEGIN(3, 0);
6631 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6632 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6633 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6636 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6637 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6638 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6639 IEM_MC_END();
6640 return VINF_SUCCESS;
6641}
6642
6643
6644/**
6645 * @opmaps grp15
6646 * @opcode !11/5
6647 * @oppfx none
6648 * @opcpuid xsave
6649 * @opgroup og_system
6650 * @opxcpttype none
6651 */
6652FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6653{
6654 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6655 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6656 return IEMOP_RAISE_INVALID_OPCODE();
6657
6658 IEM_MC_BEGIN(3, 0);
6659 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6660 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6661 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 6664 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, cf. fxrstor above. */
6665 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6666 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6667 IEM_MC_END();
6668 return VINF_SUCCESS;
6669}
6670
6671/** Opcode 0x0f 0xae mem/6. */
6672FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6673
6674/**
6675 * @opmaps grp15
6676 * @opcode !11/7
6677 * @oppfx none
6678 * @opcpuid clfsh
6679 * @opgroup og_cachectl
6680 * @optest op1=1 ->
6681 */
6682FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6683{
6684 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6685 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6686 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6687
6688 IEM_MC_BEGIN(2, 0);
6689 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6690 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6693 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6694 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6695 IEM_MC_END();
6696 return VINF_SUCCESS;
6697}
6698
6699/**
6700 * @opmaps grp15
6701 * @opcode !11/7
6702 * @oppfx 0x66
6703 * @opcpuid clflushopt
6704 * @opgroup og_cachectl
6705 * @optest op1=1 ->
6706 */
6707FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6708{
6709 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6710 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6711 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6712
6713 IEM_MC_BEGIN(2, 0);
6714 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6715 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6718 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6719 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6720 IEM_MC_END();
6721 return VINF_SUCCESS;
6722}
6723
6724
6725/** Opcode 0x0f 0xae 11b/5. */
6726FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6727{
6728 RT_NOREF_PV(bRm);
6729 IEMOP_MNEMONIC(lfence, "lfence");
6730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6731 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6732 return IEMOP_RAISE_INVALID_OPCODE();
6733
6734 IEM_MC_BEGIN(0, 0);
6735 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6736 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6737 else
6738 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6739 IEM_MC_ADVANCE_RIP();
6740 IEM_MC_END();
6741 return VINF_SUCCESS;
6742}
6743
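/*
 * Note! The guest may report SSE2 while the host lacks it, so the host
 *       feature check above selects iemAImpl_alt_mem_fence instead of a real
 *       LFENCE.  The alternative presumably achieves ordering with a locked
 *       memory operation (serializing on all IA-32 CPUs); the actual
 *       implementation lives in the assembly helpers, not in this file.
 */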
6744
6745/** Opcode 0x0f 0xae 11b/6. */
6746FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6747{
6748 RT_NOREF_PV(bRm);
6749 IEMOP_MNEMONIC(mfence, "mfence");
6750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6751 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6752 return IEMOP_RAISE_INVALID_OPCODE();
6753
6754 IEM_MC_BEGIN(0, 0);
6755 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6756 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6757 else
6758 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6759 IEM_MC_ADVANCE_RIP();
6760 IEM_MC_END();
6761 return VINF_SUCCESS;
6762}
6763
6764
6765/** Opcode 0x0f 0xae 11b/7. */
6766FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6767{
6768 RT_NOREF_PV(bRm);
6769 IEMOP_MNEMONIC(sfence, "sfence");
6770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6771 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6772 return IEMOP_RAISE_INVALID_OPCODE();
6773
6774 IEM_MC_BEGIN(0, 0);
6775 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6776 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6777 else
6778 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6779 IEM_MC_ADVANCE_RIP();
6780 IEM_MC_END();
6781 return VINF_SUCCESS;
6782}
6783
6784
6785/** Opcode 0xf3 0x0f 0xae 11b/0. */
6786FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6787
6788/** Opcode 0xf3 0x0f 0xae 11b/1. */
6789FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6790
6791/** Opcode 0xf3 0x0f 0xae 11b/2. */
6792FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6793
6794/** Opcode 0xf3 0x0f 0xae 11b/3. */
6795FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6796
6797
6798/**
6799 * Group 15 jump table for register variant.
6800 */
6801IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6802{ /* pfx: none, 066h, 0f3h, 0f2h */
6803 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6804 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6805 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6806 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6807 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6808 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6809 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6810 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6811};
6812AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6813
6814
6815/**
6816 * Group 15 jump table for memory variant.
6817 */
6818IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6819{ /* pfx: none, 066h, 0f3h, 0f2h */
6820 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6821 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6822 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6823 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6824 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6825 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6826 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6827 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6828};
6829AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6830
6831
6832/** Opcode 0x0f 0xae. */
6833FNIEMOP_DEF(iemOp_Grp15)
6834{
6835 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6837 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6838 /* register, register */
6839 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6840 + pVCpu->iem.s.idxPrefix], bRm);
6841 /* memory, register */
6842 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6843 + pVCpu->iem.s.idxPrefix], bRm);
6844}
6845
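/*
 * Note! Both group 15 tables are indexed by /reg * 4 + idxPrefix, with the
 *       prefix columns ordered none, 066h, 0f3h, 0f2h as per the table
 *       comments.  Worked example: 0f ae e8 decodes to mod=3, reg=5, no
 *       prefix, so entry 5*4 + 0 of g_apfnGroup15RegReg is taken and we
 *       dispatch to iemOp_Grp15_lfence.
 */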
6846
6847/** Opcode 0x0f 0xaf. */
6848FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6849{
6850 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6851 IEMOP_HLP_MIN_386();
6852 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6853 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6854}
6855
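/*
 * Note! This is the two-operand form: Gv = Gv * Ev (signed), with CF and OF
 *       set when the product does not fit the destination.  Worked example:
 *       'imul eax, ebx' with eax=ebx=0x10000 produces 2^32, truncated to
 *       eax=0 with CF=OF=1; SF/ZF/AF/PF are left undefined, as the
 *       IEMOP_VERIFICATION_UNDEFINED_EFLAGS above records.
 */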
6856
6857/** Opcode 0x0f 0xb0. */
6858FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6859{
6860 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6861 IEMOP_HLP_MIN_486();
6862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6863
6864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6865 {
6866 IEMOP_HLP_DONE_DECODING();
6867 IEM_MC_BEGIN(4, 0);
6868 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6869 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6870 IEM_MC_ARG(uint8_t, u8Src, 2);
6871 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6872
6873 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6874 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6875 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6876 IEM_MC_REF_EFLAGS(pEFlags);
6877 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6878 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6879 else
6880 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6881
6882 IEM_MC_ADVANCE_RIP();
6883 IEM_MC_END();
6884 }
6885 else
6886 {
6887 IEM_MC_BEGIN(4, 3);
6888 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6889 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6890 IEM_MC_ARG(uint8_t, u8Src, 2);
6891 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6893 IEM_MC_LOCAL(uint8_t, u8Al);
6894
6895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6896 IEMOP_HLP_DONE_DECODING();
6897 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6898 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6899 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6900 IEM_MC_FETCH_EFLAGS(EFlags);
6901 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6902 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6903 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6904 else
6905 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6906
6907 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6908 IEM_MC_COMMIT_EFLAGS(EFlags);
6909 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6910 IEM_MC_ADVANCE_RIP();
6911 IEM_MC_END();
6912 }
6913 return VINF_SUCCESS;
6914}
6915
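/*
 * Note! CMPXCHG compares the accumulator with the destination: on equality
 *       ZF is set and the source is stored into the destination, otherwise
 *       ZF is cleared and the destination value is loaded into the
 *       accumulator.  Worked example for 'cmpxchg [mem8], bl' with al=5:
 *           [mem8]=5  ->  ZF=1, [mem8]=bl, al unchanged
 *           [mem8]=7  ->  ZF=0, al=7, [mem8] rewritten with 7
 *       The destination is written back even when the compare fails, which
 *       is why the memory path above maps it IEM_ACCESS_DATA_RW.
 */
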
6916/** Opcode 0x0f 0xb1. */
6917FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6918{
6919 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6920 IEMOP_HLP_MIN_486();
6921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6922
6923 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6924 {
6925 IEMOP_HLP_DONE_DECODING();
6926 switch (pVCpu->iem.s.enmEffOpSize)
6927 {
6928 case IEMMODE_16BIT:
6929 IEM_MC_BEGIN(4, 0);
6930 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6931 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6932 IEM_MC_ARG(uint16_t, u16Src, 2);
6933 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6934
6935 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6936 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6937 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6938 IEM_MC_REF_EFLAGS(pEFlags);
6939 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6940 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6941 else
6942 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6943
6944 IEM_MC_ADVANCE_RIP();
6945 IEM_MC_END();
6946 return VINF_SUCCESS;
6947
6948 case IEMMODE_32BIT:
6949 IEM_MC_BEGIN(4, 0);
6950 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6951 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6952 IEM_MC_ARG(uint32_t, u32Src, 2);
6953 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6954
6955 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6956 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6957 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6958 IEM_MC_REF_EFLAGS(pEFlags);
6959 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6960 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6961 else
6962 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6963
6964 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6965 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6966 IEM_MC_ADVANCE_RIP();
6967 IEM_MC_END();
6968 return VINF_SUCCESS;
6969
6970 case IEMMODE_64BIT:
6971 IEM_MC_BEGIN(4, 0);
6972 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6973 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6974#ifdef RT_ARCH_X86
6975 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6976#else
6977 IEM_MC_ARG(uint64_t, u64Src, 2);
6978#endif
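                /* Note! On 32-bit (x86) hosts the 64-bit comparand is passed
                         to the assembly worker by reference instead of by
                         value, hence the ifdef around both this argument
                         declaration and the call further down; the worker
                         signatures differ per host architecture. */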
6979 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6980
6981 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6982 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6983 IEM_MC_REF_EFLAGS(pEFlags);
6984#ifdef RT_ARCH_X86
6985 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6987 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6988 else
6989 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6990#else
6991 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6992 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6993 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6994 else
6995 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6996#endif
6997
6998 IEM_MC_ADVANCE_RIP();
6999 IEM_MC_END();
7000 return VINF_SUCCESS;
7001
7002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7003 }
7004 }
7005 else
7006 {
7007 switch (pVCpu->iem.s.enmEffOpSize)
7008 {
7009 case IEMMODE_16BIT:
7010 IEM_MC_BEGIN(4, 3);
7011 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7012 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7013 IEM_MC_ARG(uint16_t, u16Src, 2);
7014 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7016 IEM_MC_LOCAL(uint16_t, u16Ax);
7017
7018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7019 IEMOP_HLP_DONE_DECODING();
7020 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7021 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7022 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7023 IEM_MC_FETCH_EFLAGS(EFlags);
7024 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7025 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7026 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7027 else
7028 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7029
7030 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7031 IEM_MC_COMMIT_EFLAGS(EFlags);
7032 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7033 IEM_MC_ADVANCE_RIP();
7034 IEM_MC_END();
7035 return VINF_SUCCESS;
7036
7037 case IEMMODE_32BIT:
7038 IEM_MC_BEGIN(4, 3);
7039 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7040 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7041 IEM_MC_ARG(uint32_t, u32Src, 2);
7042 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7044 IEM_MC_LOCAL(uint32_t, u32Eax);
7045
7046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7047 IEMOP_HLP_DONE_DECODING();
7048 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7049 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7050 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7051 IEM_MC_FETCH_EFLAGS(EFlags);
7052 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7053 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7054 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7055 else
7056 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7057
7058 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7059 IEM_MC_COMMIT_EFLAGS(EFlags);
7060 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7061 IEM_MC_ADVANCE_RIP();
7062 IEM_MC_END();
7063 return VINF_SUCCESS;
7064
7065 case IEMMODE_64BIT:
7066 IEM_MC_BEGIN(4, 3);
7067 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7068 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7069#ifdef RT_ARCH_X86
7070 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7071#else
7072 IEM_MC_ARG(uint64_t, u64Src, 2);
7073#endif
7074 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7076 IEM_MC_LOCAL(uint64_t, u64Rax);
7077
7078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7079 IEMOP_HLP_DONE_DECODING();
7080 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7081 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7082 IEM_MC_FETCH_EFLAGS(EFlags);
7083 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7084#ifdef RT_ARCH_X86
7085 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7086 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7087 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7088 else
7089 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7090#else
7091 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7092 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7093 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7094 else
7095 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7096#endif
7097
7098 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7099 IEM_MC_COMMIT_EFLAGS(EFlags);
7100 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7101 IEM_MC_ADVANCE_RIP();
7102 IEM_MC_END();
7103 return VINF_SUCCESS;
7104
7105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7106 }
7107 }
7108}
7109
7110
7111FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7112{
7113 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7114 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7115
7116 switch (pVCpu->iem.s.enmEffOpSize)
7117 {
7118 case IEMMODE_16BIT:
7119 IEM_MC_BEGIN(5, 1);
7120 IEM_MC_ARG(uint16_t, uSel, 0);
7121 IEM_MC_ARG(uint16_t, offSeg, 1);
7122 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7123 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7124 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7125 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7128 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7129 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7130 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7131 IEM_MC_END();
7132 return VINF_SUCCESS;
7133
7134 case IEMMODE_32BIT:
7135 IEM_MC_BEGIN(5, 1);
7136 IEM_MC_ARG(uint16_t, uSel, 0);
7137 IEM_MC_ARG(uint32_t, offSeg, 1);
7138 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7139 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7140 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7141 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7144 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7145 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7146 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7147 IEM_MC_END();
7148 return VINF_SUCCESS;
7149
7150 case IEMMODE_64BIT:
7151 IEM_MC_BEGIN(5, 1);
7152 IEM_MC_ARG(uint16_t, uSel, 0);
7153 IEM_MC_ARG(uint64_t, offSeg, 1);
7154 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7155 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7156 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7157 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7160 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7161 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7162 else
7163 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7164 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7165 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7166 IEM_MC_END();
7167 return VINF_SUCCESS;
7168
7169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7170 }
7171}
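/*
 * Layout sketch of the far pointer (Mp) operand fetched above, shown for the
 * 32-bit operand size with made-up example bytes:
 *
 *      GCPtrEff + 0: dword 00401000h -> offSeg (ends up in the GREG)
 *      GCPtrEff + 4: word      0023h -> uSel   (ends up in the SREG)
 *
 * So 'lss esp, [mem]' over those bytes would yield ESP=00401000h and
 * SS=0023h, with iemCImpl_load_SReg_Greg doing the selector checks.
 */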
7172
7173
7174/** Opcode 0x0f 0xb2. */
7175FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7176{
7177 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7178 IEMOP_HLP_MIN_386();
7179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7180 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7181 return IEMOP_RAISE_INVALID_OPCODE();
7182 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7183}
7184
7185
7186/** Opcode 0x0f 0xb3. */
7187FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7188{
7189 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7190 IEMOP_HLP_MIN_386();
7191 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7192}
7193
7194
7195/** Opcode 0x0f 0xb4. */
7196FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7197{
7198 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7199 IEMOP_HLP_MIN_386();
7200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7201 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7202 return IEMOP_RAISE_INVALID_OPCODE();
7203 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7204}
7205
7206
7207/** Opcode 0x0f 0xb5. */
7208FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7209{
7210 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7211 IEMOP_HLP_MIN_386();
7212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7214 return IEMOP_RAISE_INVALID_OPCODE();
7215 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7216}
7217
7218
7219/** Opcode 0x0f 0xb6. */
7220FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7221{
7222 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7223 IEMOP_HLP_MIN_386();
7224
7225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7226
7227 /*
7228 * If rm denotes a register, there are no more instruction bytes.
7229 */
7230 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7231 {
7232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7233 switch (pVCpu->iem.s.enmEffOpSize)
7234 {
7235 case IEMMODE_16BIT:
7236 IEM_MC_BEGIN(0, 1);
7237 IEM_MC_LOCAL(uint16_t, u16Value);
7238 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7239 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7240 IEM_MC_ADVANCE_RIP();
7241 IEM_MC_END();
7242 return VINF_SUCCESS;
7243
7244 case IEMMODE_32BIT:
7245 IEM_MC_BEGIN(0, 1);
7246 IEM_MC_LOCAL(uint32_t, u32Value);
7247 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7248 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7249 IEM_MC_ADVANCE_RIP();
7250 IEM_MC_END();
7251 return VINF_SUCCESS;
7252
7253 case IEMMODE_64BIT:
7254 IEM_MC_BEGIN(0, 1);
7255 IEM_MC_LOCAL(uint64_t, u64Value);
7256 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7257 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7258 IEM_MC_ADVANCE_RIP();
7259 IEM_MC_END();
7260 return VINF_SUCCESS;
7261
7262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7263 }
7264 }
7265 else
7266 {
7267 /*
7268 * We're loading a register from memory.
7269 */
7270 switch (pVCpu->iem.s.enmEffOpSize)
7271 {
7272 case IEMMODE_16BIT:
7273 IEM_MC_BEGIN(0, 2);
7274 IEM_MC_LOCAL(uint16_t, u16Value);
7275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7278 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7279 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7280 IEM_MC_ADVANCE_RIP();
7281 IEM_MC_END();
7282 return VINF_SUCCESS;
7283
7284 case IEMMODE_32BIT:
7285 IEM_MC_BEGIN(0, 2);
7286 IEM_MC_LOCAL(uint32_t, u32Value);
7287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7290 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7291 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7292 IEM_MC_ADVANCE_RIP();
7293 IEM_MC_END();
7294 return VINF_SUCCESS;
7295
7296 case IEMMODE_64BIT:
7297 IEM_MC_BEGIN(0, 2);
7298 IEM_MC_LOCAL(uint64_t, u64Value);
7299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7302 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7303 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7304 IEM_MC_ADVANCE_RIP();
7305 IEM_MC_END();
7306 return VINF_SUCCESS;
7307
7308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7309 }
7310 }
7311}
7312
7313
7314/** Opcode 0x0f 0xb7. */
7315FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7316{
7317 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7318 IEMOP_HLP_MIN_386();
7319
7320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7321
7322 /** @todo Not entirely sure how the operand size prefix is handled here,
7323 * assuming that it will be ignored. Would be nice to have a few
7324 * tests for this. */
7325 /*
7326 * If rm denotes a register, there are no more instruction bytes.
7327 */
7328 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7329 {
7330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7331 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7332 {
7333 IEM_MC_BEGIN(0, 1);
7334 IEM_MC_LOCAL(uint32_t, u32Value);
7335 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7336 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7337 IEM_MC_ADVANCE_RIP();
7338 IEM_MC_END();
7339 }
7340 else
7341 {
7342 IEM_MC_BEGIN(0, 1);
7343 IEM_MC_LOCAL(uint64_t, u64Value);
7344 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7345 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7346 IEM_MC_ADVANCE_RIP();
7347 IEM_MC_END();
7348 }
7349 }
7350 else
7351 {
7352 /*
7353 * We're loading a register from memory.
7354 */
7355 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7356 {
7357 IEM_MC_BEGIN(0, 2);
7358 IEM_MC_LOCAL(uint32_t, u32Value);
7359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7362 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7363 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7364 IEM_MC_ADVANCE_RIP();
7365 IEM_MC_END();
7366 }
7367 else
7368 {
7369 IEM_MC_BEGIN(0, 2);
7370 IEM_MC_LOCAL(uint64_t, u64Value);
7371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7374 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7375 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7376 IEM_MC_ADVANCE_RIP();
7377 IEM_MC_END();
7378 }
7379 }
7380 return VINF_SUCCESS;
7381}
7382
7383
7384/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7385FNIEMOP_UD_STUB(iemOp_jmpe);
7386/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7387FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7388
7389
7390/**
7391 * @opcode 0xb9
7392 * @opinvalid intel-modrm
7393 * @optest ->
7394 */
7395FNIEMOP_DEF(iemOp_Grp10)
7396{
7397 /*
7398 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7399 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7400 */
7401 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7402 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7403 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7404}
7405
7406
7407/** Opcode 0x0f 0xba. */
7408FNIEMOP_DEF(iemOp_Grp8)
7409{
7410 IEMOP_HLP_MIN_386();
7411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7412 PCIEMOPBINSIZES pImpl;
7413 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7414 {
7415 case 0: case 1: case 2: case 3:
7416 /* Both AMD and Intel want full modr/m decoding and imm8. */
7417 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7418 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7419 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7420 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7421 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7423 }
7424 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7425
7426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7427 {
7428 /* register destination. */
7429 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7431
7432 switch (pVCpu->iem.s.enmEffOpSize)
7433 {
7434 case IEMMODE_16BIT:
7435 IEM_MC_BEGIN(3, 0);
7436 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7437 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7438 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7439
7440 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7441 IEM_MC_REF_EFLAGS(pEFlags);
7442 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7443
7444 IEM_MC_ADVANCE_RIP();
7445 IEM_MC_END();
7446 return VINF_SUCCESS;
7447
7448 case IEMMODE_32BIT:
7449 IEM_MC_BEGIN(3, 0);
7450 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7451 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7452 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7453
7454 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7455 IEM_MC_REF_EFLAGS(pEFlags);
7456 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7457
7458 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7459 IEM_MC_ADVANCE_RIP();
7460 IEM_MC_END();
7461 return VINF_SUCCESS;
7462
7463 case IEMMODE_64BIT:
7464 IEM_MC_BEGIN(3, 0);
7465 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7466 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7467 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7468
7469 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7470 IEM_MC_REF_EFLAGS(pEFlags);
7471 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7472
7473 IEM_MC_ADVANCE_RIP();
7474 IEM_MC_END();
7475 return VINF_SUCCESS;
7476
7477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7478 }
7479 }
7480 else
7481 {
7482 /* memory destination. */
7483
7484 uint32_t fAccess;
7485 if (pImpl->pfnLockedU16)
7486 fAccess = IEM_ACCESS_DATA_RW;
7487 else /* BT */
7488 fAccess = IEM_ACCESS_DATA_R;
7489
7490 /** @todo test negative bit offsets! */
7491 switch (pVCpu->iem.s.enmEffOpSize)
7492 {
7493 case IEMMODE_16BIT:
7494 IEM_MC_BEGIN(3, 1);
7495 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7496 IEM_MC_ARG(uint16_t, u16Src, 1);
7497 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7499
7500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7501 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7502 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7503 if (pImpl->pfnLockedU16)
7504 IEMOP_HLP_DONE_DECODING();
7505 else
7506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7507 IEM_MC_FETCH_EFLAGS(EFlags);
7508 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7509 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7510 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7511 else
7512 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7513 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7514
7515 IEM_MC_COMMIT_EFLAGS(EFlags);
7516 IEM_MC_ADVANCE_RIP();
7517 IEM_MC_END();
7518 return VINF_SUCCESS;
7519
7520 case IEMMODE_32BIT:
7521 IEM_MC_BEGIN(3, 1);
7522 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7523 IEM_MC_ARG(uint32_t, u32Src, 1);
7524 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7526
7527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7528 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7529 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7530 if (pImpl->pfnLockedU16)
7531 IEMOP_HLP_DONE_DECODING();
7532 else
7533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7534 IEM_MC_FETCH_EFLAGS(EFlags);
7535 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7536 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7537 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7538 else
7539 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7540 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7541
7542 IEM_MC_COMMIT_EFLAGS(EFlags);
7543 IEM_MC_ADVANCE_RIP();
7544 IEM_MC_END();
7545 return VINF_SUCCESS;
7546
7547 case IEMMODE_64BIT:
7548 IEM_MC_BEGIN(3, 1);
7549 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7550 IEM_MC_ARG(uint64_t, u64Src, 1);
7551 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7553
7554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7555 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7556 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7557 if (pImpl->pfnLockedU16)
7558 IEMOP_HLP_DONE_DECODING();
7559 else
7560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7561 IEM_MC_FETCH_EFLAGS(EFlags);
7562 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7563 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7564 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7565 else
7566 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7567 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7568
7569 IEM_MC_COMMIT_EFLAGS(EFlags);
7570 IEM_MC_ADVANCE_RIP();
7571 IEM_MC_END();
7572 return VINF_SUCCESS;
7573
7574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7575 }
7576 }
7577}
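/*
 * Worked example of the immediate masking above (u8Bit & 0x0f/0x1f/0x3f):
 * with a 16-bit operand, 'bt ax, 17' tests bit 17 & 15 = bit 1 of AX. The
 * same masking is applied to the memory forms, so unlike register-sourced
 * bit offsets an imm8 can never address bits outside the operand itself.
 */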
7578
7579
7580/** Opcode 0x0f 0xbb. */
7581FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7582{
7583 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7584 IEMOP_HLP_MIN_386();
7585 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7586}
7587
7588
7589/** Opcode 0x0f 0xbc. */
7590FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7591{
7592 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7593 IEMOP_HLP_MIN_386();
7594 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7595 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7596}
7597
7598
7599/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7600FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7601
7602
7603/** Opcode 0x0f 0xbd. */
7604FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7605{
7606 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7607 IEMOP_HLP_MIN_386();
7608 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7609 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7610}
7611
7612
7613/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7614FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7615
7616
7617/** Opcode 0x0f 0xbe. */
7618FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7619{
7620 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7621 IEMOP_HLP_MIN_386();
7622
7623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7624
7625 /*
7626 * If rm denotes a register, there are no more instruction bytes.
7627 */
7628 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7629 {
7630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7631 switch (pVCpu->iem.s.enmEffOpSize)
7632 {
7633 case IEMMODE_16BIT:
7634 IEM_MC_BEGIN(0, 1);
7635 IEM_MC_LOCAL(uint16_t, u16Value);
7636 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7637 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7638 IEM_MC_ADVANCE_RIP();
7639 IEM_MC_END();
7640 return VINF_SUCCESS;
7641
7642 case IEMMODE_32BIT:
7643 IEM_MC_BEGIN(0, 1);
7644 IEM_MC_LOCAL(uint32_t, u32Value);
7645 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7646 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7647 IEM_MC_ADVANCE_RIP();
7648 IEM_MC_END();
7649 return VINF_SUCCESS;
7650
7651 case IEMMODE_64BIT:
7652 IEM_MC_BEGIN(0, 1);
7653 IEM_MC_LOCAL(uint64_t, u64Value);
7654 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7655 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7656 IEM_MC_ADVANCE_RIP();
7657 IEM_MC_END();
7658 return VINF_SUCCESS;
7659
7660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7661 }
7662 }
7663 else
7664 {
7665 /*
7666 * We're loading a register from memory.
7667 */
7668 switch (pVCpu->iem.s.enmEffOpSize)
7669 {
7670 case IEMMODE_16BIT:
7671 IEM_MC_BEGIN(0, 2);
7672 IEM_MC_LOCAL(uint16_t, u16Value);
7673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7676 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7677 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7678 IEM_MC_ADVANCE_RIP();
7679 IEM_MC_END();
7680 return VINF_SUCCESS;
7681
7682 case IEMMODE_32BIT:
7683 IEM_MC_BEGIN(0, 2);
7684 IEM_MC_LOCAL(uint32_t, u32Value);
7685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7688 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7689 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7690 IEM_MC_ADVANCE_RIP();
7691 IEM_MC_END();
7692 return VINF_SUCCESS;
7693
7694 case IEMMODE_64BIT:
7695 IEM_MC_BEGIN(0, 2);
7696 IEM_MC_LOCAL(uint64_t, u64Value);
7697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7700 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7701 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7702 IEM_MC_ADVANCE_RIP();
7703 IEM_MC_END();
7704 return VINF_SUCCESS;
7705
7706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7707 }
7708 }
7709}
7710
7711
7712/** Opcode 0x0f 0xbf. */
7713FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7714{
7715 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7716 IEMOP_HLP_MIN_386();
7717
7718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7719
7720 /** @todo Not entirely sure how the operand size prefix is handled here,
7721 * assuming that it will be ignored. Would be nice to have a few
7722 * tests for this. */
7723 /*
7724 * If rm denotes a register, there are no more instruction bytes.
7725 */
7726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7727 {
7728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7729 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7730 {
7731 IEM_MC_BEGIN(0, 1);
7732 IEM_MC_LOCAL(uint32_t, u32Value);
7733 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7734 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7735 IEM_MC_ADVANCE_RIP();
7736 IEM_MC_END();
7737 }
7738 else
7739 {
7740 IEM_MC_BEGIN(0, 1);
7741 IEM_MC_LOCAL(uint64_t, u64Value);
7742 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7743 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7744 IEM_MC_ADVANCE_RIP();
7745 IEM_MC_END();
7746 }
7747 }
7748 else
7749 {
7750 /*
7751 * We're loading a register from memory.
7752 */
7753 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7754 {
7755 IEM_MC_BEGIN(0, 2);
7756 IEM_MC_LOCAL(uint32_t, u32Value);
7757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7760 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7761 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7762 IEM_MC_ADVANCE_RIP();
7763 IEM_MC_END();
7764 }
7765 else
7766 {
7767 IEM_MC_BEGIN(0, 2);
7768 IEM_MC_LOCAL(uint64_t, u64Value);
7769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7772 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7773 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7774 IEM_MC_ADVANCE_RIP();
7775 IEM_MC_END();
7776 }
7777 }
7778 return VINF_SUCCESS;
7779}
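/*
 * Quick contrast of the 0x0f 0xb6..0xb7 and 0xbe..0xbf families with example
 * values: for a word [mem]=8000h, 'movzx eax, word [mem]' yields
 * EAX=00008000h, while 'movsx eax, word [mem]' yields EAX=FFFF8000h.
 */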
7780
7781
7782/** Opcode 0x0f 0xc0. */
7783FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7784{
7785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7786 IEMOP_HLP_MIN_486();
7787 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7788
7789 /*
7790 * If rm denotes a register, there are no more instruction bytes.
7791 */
7792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7793 {
7794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795
7796 IEM_MC_BEGIN(3, 0);
7797 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7798 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7799 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7800
7801 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7802 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7803 IEM_MC_REF_EFLAGS(pEFlags);
7804 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7805
7806 IEM_MC_ADVANCE_RIP();
7807 IEM_MC_END();
7808 }
7809 else
7810 {
7811 /*
7812 * We're accessing memory.
7813 */
7814 IEM_MC_BEGIN(3, 3);
7815 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7816 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7817 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7818 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7820
7821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7822 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7823 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7824 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7825 IEM_MC_FETCH_EFLAGS(EFlags);
7826 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7827 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7828 else
7829 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7830
7831 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7832 IEM_MC_COMMIT_EFLAGS(EFlags);
7833 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7834 IEM_MC_ADVANCE_RIP();
7835 IEM_MC_END();
7836 return VINF_SUCCESS;
7837 }
7838 return VINF_SUCCESS;
7839}
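/*
 * XADD semantics in pseudo-C (a sketch; iemAImpl_xadd_u8 and friends are the
 * authority):
 *
 *      uTmp   = *puDst;
 *      *puDst = uTmp + *puReg;     // EFLAGS set as for ADD
 *      *puReg = uTmp;
 *
 * This is why the memory forms work on a register copy and only store it
 * back once the memory access has been committed.
 */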
7840
7841
7842/** Opcode 0x0f 0xc1. */
7843FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7844{
7845 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7846 IEMOP_HLP_MIN_486();
7847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7848
7849 /*
7850 * If rm denotes a register, there are no more instruction bytes.
7851 */
7852 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7853 {
7854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7855
7856 switch (pVCpu->iem.s.enmEffOpSize)
7857 {
7858 case IEMMODE_16BIT:
7859 IEM_MC_BEGIN(3, 0);
7860 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7861 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7862 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7863
7864 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7865 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7866 IEM_MC_REF_EFLAGS(pEFlags);
7867 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7868
7869 IEM_MC_ADVANCE_RIP();
7870 IEM_MC_END();
7871 return VINF_SUCCESS;
7872
7873 case IEMMODE_32BIT:
7874 IEM_MC_BEGIN(3, 0);
7875 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7876 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7877 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7878
7879 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7880 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7881 IEM_MC_REF_EFLAGS(pEFlags);
7882 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7883
7884 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7885 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7886 IEM_MC_ADVANCE_RIP();
7887 IEM_MC_END();
7888 return VINF_SUCCESS;
7889
7890 case IEMMODE_64BIT:
7891 IEM_MC_BEGIN(3, 0);
7892 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7893 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7894 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7895
7896 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7897 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7898 IEM_MC_REF_EFLAGS(pEFlags);
7899 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7900
7901 IEM_MC_ADVANCE_RIP();
7902 IEM_MC_END();
7903 return VINF_SUCCESS;
7904
7905 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7906 }
7907 }
7908 else
7909 {
7910 /*
7911 * We're accessing memory.
7912 */
7913 switch (pVCpu->iem.s.enmEffOpSize)
7914 {
7915 case IEMMODE_16BIT:
7916 IEM_MC_BEGIN(3, 3);
7917 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7918 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7919 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7920 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7922
7923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7924 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7925 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7926 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7927 IEM_MC_FETCH_EFLAGS(EFlags);
7928 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7929 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7930 else
7931 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7932
7933 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7934 IEM_MC_COMMIT_EFLAGS(EFlags);
7935 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7936 IEM_MC_ADVANCE_RIP();
7937 IEM_MC_END();
7938 return VINF_SUCCESS;
7939
7940 case IEMMODE_32BIT:
7941 IEM_MC_BEGIN(3, 3);
7942 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7943 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7944 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7945 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7947
7948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7949 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7950 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7951 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7952 IEM_MC_FETCH_EFLAGS(EFlags);
7953 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7954 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7955 else
7956 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7957
7958 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7959 IEM_MC_COMMIT_EFLAGS(EFlags);
7960 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7961 IEM_MC_ADVANCE_RIP();
7962 IEM_MC_END();
7963 return VINF_SUCCESS;
7964
7965 case IEMMODE_64BIT:
7966 IEM_MC_BEGIN(3, 3);
7967 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7968 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7969 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7970 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7972
7973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7974 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7975 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7976 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7977 IEM_MC_FETCH_EFLAGS(EFlags);
7978 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7979 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7980 else
7981 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7982
7983 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7984 IEM_MC_COMMIT_EFLAGS(EFlags);
7985 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7986 IEM_MC_ADVANCE_RIP();
7987 IEM_MC_END();
7988 return VINF_SUCCESS;
7989
7990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7991 }
7992 }
7993}
7994
7995
7996/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7997FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7998/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7999FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8000/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8001FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8002/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8003FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8004
8005
8006/** Opcode 0x0f 0xc3. */
8007FNIEMOP_DEF(iemOp_movnti_My_Gy)
8008{
8009 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8010
8011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8012
8013 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8014 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8015 {
8016 switch (pVCpu->iem.s.enmEffOpSize)
8017 {
8018 case IEMMODE_32BIT:
8019 IEM_MC_BEGIN(0, 2);
8020 IEM_MC_LOCAL(uint32_t, u32Value);
8021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8022
8023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8025 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8026 return IEMOP_RAISE_INVALID_OPCODE();
8027
8028 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8029 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8030 IEM_MC_ADVANCE_RIP();
8031 IEM_MC_END();
8032 break;
8033
8034 case IEMMODE_64BIT:
8035 IEM_MC_BEGIN(0, 2);
8036 IEM_MC_LOCAL(uint64_t, u64Value);
8037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8038
8039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8041 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8042 return IEMOP_RAISE_INVALID_OPCODE();
8043
8044 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8045 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8046 IEM_MC_ADVANCE_RIP();
8047 IEM_MC_END();
8048 break;
8049
8050 case IEMMODE_16BIT:
8051 /** @todo check this form. */
8052 return IEMOP_RAISE_INVALID_OPCODE();
8053 }
8054 }
8055 else
8056 return IEMOP_RAISE_INVALID_OPCODE();
8057 return VINF_SUCCESS;
8058}
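/*
 * Note on the plain stores above: MOVNTI differs from a normal MOV only by
 * its non-temporal cache hint, which has no architectural effect on the
 * result, so ignoring the hint here should be fine (my reading).
 */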
8059/* Opcode 0x66 0x0f 0xc3 - invalid */
8060/* Opcode 0xf3 0x0f 0xc3 - invalid */
8061/* Opcode 0xf2 0x0f 0xc3 - invalid */
8062
8063/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8064FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8065/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8066FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8067/* Opcode 0xf3 0x0f 0xc4 - invalid */
8068/* Opcode 0xf2 0x0f 0xc4 - invalid */
8069
8070/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8071FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8072/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8073FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8074/* Opcode 0xf3 0x0f 0xc5 - invalid */
8075/* Opcode 0xf2 0x0f 0xc5 - invalid */
8076
8077/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8078FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8079/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8080FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8081/* Opcode 0xf3 0x0f 0xc6 - invalid */
8082/* Opcode 0xf2 0x0f 0xc6 - invalid */
8083
8084
8085/** Opcode 0x0f 0xc7 !11/1. */
8086FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8087{
8088 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8089
8090 IEM_MC_BEGIN(4, 3);
8091 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8092 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8093 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8094 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8095 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8096 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8098
8099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8100 IEMOP_HLP_DONE_DECODING();
8101 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8102
8103 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8104 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8105 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8106
8107 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8108 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8109 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8110
8111 IEM_MC_FETCH_EFLAGS(EFlags);
8112 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8113 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8114 else
8115 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8116
8117 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8118 IEM_MC_COMMIT_EFLAGS(EFlags);
8119 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8120 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8121 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8122 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8123 IEM_MC_ENDIF();
8124 IEM_MC_ADVANCE_RIP();
8125
8126 IEM_MC_END();
8127 return VINF_SUCCESS;
8128}
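/*
 * CMPXCHG8B sketch (my summary, not authoritative):
 *
 *      if (EDX:EAX == [mem64]) { ZF = 1; [mem64] = ECX:EBX; }
 *      else                    { ZF = 0; EDX:EAX = [mem64]; }
 *
 * What happens to bits 63:32 of RAX/RDX in 64-bit mode is still a todo
 * above, hence the plain 32-bit stores on the failure path.
 */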
8129
8130
8131/** Opcode REX.W 0x0f 0xc7 !11/1. */
8132FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8133{
8134 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8135 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8136 {
8137#if 0
8138 RT_NOREF(bRm);
8139 IEMOP_BITCH_ABOUT_STUB();
8140 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8141#else
8142 IEM_MC_BEGIN(4, 3);
8143 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8144 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8145 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8146 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8147 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8148 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8150
8151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8152 IEMOP_HLP_DONE_DECODING();
8153 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8154 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8155
8156 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8157 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8158 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8159
8160 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8161 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8162 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8163
8164 IEM_MC_FETCH_EFLAGS(EFlags);
8165# ifdef RT_ARCH_AMD64
8166 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8167 {
8168 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8169 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8170 else
8171 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8172 }
8173 else
8174# endif
8175 {
8176 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
8177 accesses that are not all atomic, which works fine in a uni-CPU guest
8178 configuration (ignoring DMA). If guest SMP is active we have no choice
8179 but to use a rendezvous callback here. Sigh. */
8180 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8181 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8182 else
8183 {
8184 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8185 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8186 }
8187 }
8188
8189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8190 IEM_MC_COMMIT_EFLAGS(EFlags);
8191 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8192 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8193 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8194 IEM_MC_ENDIF();
8195 IEM_MC_ADVANCE_RIP();
8196
8197 IEM_MC_END();
8198 return VINF_SUCCESS;
8199#endif
8200 }
8201 Log(("cmpxchg16b -> #UD\n"));
8202 return IEMOP_RAISE_INVALID_OPCODE();
8203}
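/*
 * CMPXCHG16B follows the same pattern with RDX:RAX / RCX:RBX and a 16 byte
 * destination that must be naturally aligned (#GP(0) otherwise, as checked
 * above); guests typically use it for lock-free 128-bit compare-and-swap.
 */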
8204
8205FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8206{
8207 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8208 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8209 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8210}
8211
8212/** Opcode 0x0f 0xc7 11/6. */
8213FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8214
8215/** Opcode 0x0f 0xc7 !11/6. */
8216FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8217
8218/** Opcode 0x66 0x0f 0xc7 !11/6. */
8219FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8220
8221/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8222FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8223
8224/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8225FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8226
8227/** Opcode 0x0f 0xc7 11/7. */
8228FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8229
8230
8231/**
8232 * Group 9 jump table for register variant.
8233 */
8234IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8235{ /* pfx: none, 066h, 0f3h, 0f2h */
8236 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8237 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8238 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8239 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8240 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8241 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8242 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8243 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8244};
8245AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8246
8247
8248/**
8249 * Group 9 jump table for memory variant.
8250 */
8251IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8252{ /* pfx: none, 066h, 0f3h, 0f2h */
8253 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8254 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8255 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8256 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8257 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8258 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8259 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8260 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8261};
8262AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8263
8264
8265/** Opcode 0x0f 0xc7. */
8266FNIEMOP_DEF(iemOp_Grp9)
8267{
8268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8270 /* register, register */
8271 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8272 + pVCpu->iem.s.idxPrefix], bRm);
8273 /* memory, register */
8274 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8275 + pVCpu->iem.s.idxPrefix], bRm);
8276}
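/*
 * Index computation example for the two group 9 tables: /6 (reg=6) with an
 * 0f3h prefix gives 6*4 + 2 = 26, i.e. iemOp_Grp9_vmxon_Mq in the memory
 * table and iemOp_InvalidWithRM in the register table.
 */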
8277
8278
8279/**
8280 * Common 'bswap register' helper.
8281 */
8282FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8283{
8284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8285 switch (pVCpu->iem.s.enmEffOpSize)
8286 {
8287 case IEMMODE_16BIT:
8288 IEM_MC_BEGIN(1, 0);
8289 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8290 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8291 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8292 IEM_MC_ADVANCE_RIP();
8293 IEM_MC_END();
8294 return VINF_SUCCESS;
8295
8296 case IEMMODE_32BIT:
8297 IEM_MC_BEGIN(1, 0);
8298 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8299 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8300 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8301 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8302 IEM_MC_ADVANCE_RIP();
8303 IEM_MC_END();
8304 return VINF_SUCCESS;
8305
8306 case IEMMODE_64BIT:
8307 IEM_MC_BEGIN(1, 0);
8308 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8309 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8310 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8311 IEM_MC_ADVANCE_RIP();
8312 IEM_MC_END();
8313 return VINF_SUCCESS;
8314
8315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8316 }
8317}
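/*
 * Note on the 16-bit case above: the result of BSWAP with a 16-bit operand
 * is undefined per both vendors' manuals; iemAImpl_bswap_u16 just picks one
 * concrete behaviour, hence the care taken not to clear the high dword.
 */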
8318
8319
8320/** Opcode 0x0f 0xc8. */
8321FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8322{
8323 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8324 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8325 prefix, but REX.B appears to be the correct prefix. For a parallel
8326 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8327 IEMOP_HLP_MIN_486();
8328 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8329}
8330
8331
8332/** Opcode 0x0f 0xc9. */
8333FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8334{
8335 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8336 IEMOP_HLP_MIN_486();
8337 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8338}
8339
8340
8341/** Opcode 0x0f 0xca. */
8342FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8343{
8344 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8345 IEMOP_HLP_MIN_486();
8346 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8347}
8348
8349
8350/** Opcode 0x0f 0xcb. */
8351FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8352{
8353 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8354 IEMOP_HLP_MIN_486();
8355 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8356}
8357
8358
8359/** Opcode 0x0f 0xcc. */
8360FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8361{
8362 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8363 IEMOP_HLP_MIN_486();
8364 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8365}
8366
8367
8368/** Opcode 0x0f 0xcd. */
8369FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8370{
8371 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8372 IEMOP_HLP_MIN_486();
8373 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8374}
8375
8376
8377/** Opcode 0x0f 0xce. */
8378FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8379{
8380 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8381 IEMOP_HLP_MIN_486();
8382 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8383}
8384
8385
8386/** Opcode 0x0f 0xcf. */
8387FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8388{
8389 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8390 IEMOP_HLP_MIN_486();
8391 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8392}
8393
8394
8395/* Opcode 0x0f 0xd0 - invalid */
8396/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8397FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8398/* Opcode 0xf3 0x0f 0xd0 - invalid */
8399/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8400FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8401
8402/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8403FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8404/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8405FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8406/* Opcode 0xf3 0x0f 0xd1 - invalid */
8407/* Opcode 0xf2 0x0f 0xd1 - invalid */
8408
8409/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8410FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8411/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8412FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8413/* Opcode 0xf3 0x0f 0xd2 - invalid */
8414/* Opcode 0xf2 0x0f 0xd2 - invalid */
8415
8416/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8417FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8418/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8419FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8420/* Opcode 0xf3 0x0f 0xd3 - invalid */
8421/* Opcode 0xf2 0x0f 0xd3 - invalid */
8422
8423/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8424FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8425/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8426FNIEMOP_STUB(iemOp_paddq_Vx_W);
8427/* Opcode 0xf3 0x0f 0xd4 - invalid */
8428/* Opcode 0xf2 0x0f 0xd4 - invalid */
8429
8430/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8431FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8432/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8433FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8434/* Opcode 0xf3 0x0f 0xd5 - invalid */
8435/* Opcode 0xf2 0x0f 0xd5 - invalid */
8436
8437/* Opcode 0x0f 0xd6 - invalid */
8438
8439/**
8440 * @opcode 0xd6
8441 * @oppfx 0x66
8442 * @opcpuid sse2
8443 * @opgroup og_sse2_pcksclr_datamove
8444 * @opxcpttype none
8445 * @optest op1=-1 op2=2 -> op1=2
8446 * @optest op1=0 op2=-42 -> op1=-42
8447 */
8448FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8449{
8450 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8452 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8453 {
8454 /*
8455 * Register, register.
8456 */
8457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8458 IEM_MC_BEGIN(0, 2);
8459 IEM_MC_LOCAL(uint64_t, uSrc);
8460
8461 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8463
8464 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8465 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8466
8467 IEM_MC_ADVANCE_RIP();
8468 IEM_MC_END();
8469 }
8470 else
8471 {
8472 /*
8473 * Memory, register.
8474 */
8475 IEM_MC_BEGIN(0, 2);
8476 IEM_MC_LOCAL(uint64_t, uSrc);
8477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8478
8479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8483
8484 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8485 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8486
8487 IEM_MC_ADVANCE_RIP();
8488 IEM_MC_END();
8489 }
8490 return VINF_SUCCESS;
8491}
8492
8493
8494/**
8495 * @opcode 0xd6
8496 * @opcodesub 11 mr/reg
8497 * @oppfx f3
8498 * @opcpuid sse2
8499 * @opgroup og_sse2_simdint_datamove
8500 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8501 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8502 */
8503FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8504{
8505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8507 {
8508 /*
8509 * Register, register.
8510 */
8511 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8513 IEM_MC_BEGIN(0, 1);
8514 IEM_MC_LOCAL(uint64_t, uSrc);
8515
8516 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8517 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8518
8519 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8520 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8521 IEM_MC_FPU_TO_MMX_MODE();
8522
8523 IEM_MC_ADVANCE_RIP();
8524 IEM_MC_END();
8525 return VINF_SUCCESS;
8526 }
8527
8528 /**
8529 * @opdone
8530 * @opmnemonic udf30fd6mem
8531 * @opcode 0xd6
8532 * @opcodesub !11 mr/reg
8533 * @oppfx f3
8534 * @opunused intel-modrm
8535 * @opcpuid sse
8536 * @optest ->
8537 */
8538 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8539}
8540
8541
8542/**
8543 * @opcode 0xd6
8544 * @opcodesub 11 mr/reg
8545 * @oppfx f2
8546 * @opcpuid sse2
8547 * @opgroup og_sse2_simdint_datamove
8548 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8549 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8550 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8551 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8552 * @optest op1=-42 op2=0xfedcba9876543210
8553 * -> op1=0xfedcba9876543210 ftw=0xff
8554 */
8555FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8556{
8557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8559 {
8560 /*
8561 * Register, register.
8562 */
8563 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8565 IEM_MC_BEGIN(0, 1);
8566 IEM_MC_LOCAL(uint64_t, uSrc);
8567
8568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8569 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8570
8571 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8572 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8573 IEM_MC_FPU_TO_MMX_MODE();
8574
8575 IEM_MC_ADVANCE_RIP();
8576 IEM_MC_END();
8577 return VINF_SUCCESS;
8578 }
8579
8580 /**
8581 * @opdone
8582 * @opmnemonic udf20fd6mem
8583 * @opcode 0xd6
8584 * @opcodesub !11 mr/reg
8585 * @oppfx f2
8586 * @opunused intel-modrm
8587 * @opcpuid sse
8588 * @optest ->
8589 */
8590 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8591}
8592
8593/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8594FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8595{
8596 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8597 /** @todo testcase: Check that the instruction implicitly clears the high
8598 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8599 * and opcode modifications are made to work with the whole width (not
8600 * just 128 bits). */
8601 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8602 /* Docs say register only. */
8603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8605 {
8606 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8607 IEM_MC_BEGIN(2, 0);
8608 IEM_MC_ARG(uint64_t *, pDst, 0);
8609 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8610 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8611 IEM_MC_PREPARE_FPU_USAGE();
8612 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8613 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8614 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8615 IEM_MC_ADVANCE_RIP();
8616 IEM_MC_END();
8617 return VINF_SUCCESS;
8618 }
8619 return IEMOP_RAISE_INVALID_OPCODE();
8620}
8621
8622/** Opcode 0x66 0x0f 0xd7 - */
8623FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8624{
8625 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8626 /** @todo testcase: Check that the instruction implicitly clears the high
8627 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8628 * and opcode modifications are made to work with the whole width (not
8629 * just 128 bits). */
8630 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8631 /* Docs say register only. */
8632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8634 {
8635 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8636 IEM_MC_BEGIN(2, 0);
8637 IEM_MC_ARG(uint64_t *, pDst, 0);
8638 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8639 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8640 IEM_MC_PREPARE_SSE_USAGE();
8641 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8642 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8643 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8644 IEM_MC_ADVANCE_RIP();
8645 IEM_MC_END();
8646 return VINF_SUCCESS;
8647 }
8648 return IEMOP_RAISE_INVALID_OPCODE();
8649}
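/*
 * PMOVMSKB worked example (64-bit source; the 128-bit form simply extends
 * this to 16 mask bits): for src = 8000FF7F80019000h, bit i of the result
 * is the MSB of byte i, giving 10101010b = 0AAh in the destination GREG.
 */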
8650
8651/* Opcode 0xf3 0x0f 0xd7 - invalid */
8652/* Opcode 0xf2 0x0f 0xd7 - invalid */
8653
8654
8655/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8656FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8657/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8658FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8659/* Opcode 0xf3 0x0f 0xd8 - invalid */
8660/* Opcode 0xf2 0x0f 0xd8 - invalid */
8661
8662/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8663FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8664/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8665FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8666/* Opcode 0xf3 0x0f 0xd9 - invalid */
8667/* Opcode 0xf2 0x0f 0xd9 - invalid */
8668
8669/** Opcode 0x0f 0xda - pminub Pq, Qq */
8670FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8671/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8672FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8673/* Opcode 0xf3 0x0f 0xda - invalid */
8674/* Opcode 0xf2 0x0f 0xda - invalid */
8675
8676/** Opcode 0x0f 0xdb - pand Pq, Qq */
8677FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8678/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8679FNIEMOP_STUB(iemOp_pand_Vx_W);
8680/* Opcode 0xf3 0x0f 0xdb - invalid */
8681/* Opcode 0xf2 0x0f 0xdb - invalid */
8682
8683/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8684FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8685/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8686FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8687/* Opcode 0xf3 0x0f 0xdc - invalid */
8688/* Opcode 0xf2 0x0f 0xdc - invalid */
8689
8690/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8691FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8692/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8693FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8694/* Opcode 0xf3 0x0f 0xdd - invalid */
8695/* Opcode 0xf2 0x0f 0xdd - invalid */
8696
8697/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8698FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8699/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8700FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8701/* Opcode 0xf3 0x0f 0xde - invalid */
8702/* Opcode 0xf2 0x0f 0xde - invalid */
8703
8704/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8705FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8706/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8707FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8708/* Opcode 0xf3 0x0f 0xdf - invalid */
8709/* Opcode 0xf2 0x0f 0xdf - invalid */
8710
8711/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8712FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8713/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8714FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8715/* Opcode 0xf3 0x0f 0xe0 - invalid */
8716/* Opcode 0xf2 0x0f 0xe0 - invalid */
8717
8718/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8719FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8720/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8721FNIEMOP_STUB(iemOp_psraw_Vx_W);
8722/* Opcode 0xf3 0x0f 0xe1 - invalid */
8723/* Opcode 0xf2 0x0f 0xe1 - invalid */
8724
8725/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8726FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8727/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8728FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8729/* Opcode 0xf3 0x0f 0xe2 - invalid */
8730/* Opcode 0xf2 0x0f 0xe2 - invalid */
8731
8732/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8733FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8734/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8735FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8736/* Opcode 0xf3 0x0f 0xe3 - invalid */
8737/* Opcode 0xf2 0x0f 0xe3 - invalid */
8738
8739/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8740FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8741/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8742FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8743/* Opcode 0xf3 0x0f 0xe4 - invalid */
8744/* Opcode 0xf2 0x0f 0xe4 - invalid */
8745
8746/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8747FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8748/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8749FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8750/* Opcode 0xf3 0x0f 0xe5 - invalid */
8751/* Opcode 0xf2 0x0f 0xe5 - invalid */
8752
8753/* Opcode 0x0f 0xe6 - invalid */
8754/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8755FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8756/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8757FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8758/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8759FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8760
8761
8762/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8763FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8764{
8765 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8767 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8768 {
8769 /* Register, memory. */
8770 IEM_MC_BEGIN(0, 2);
8771 IEM_MC_LOCAL(uint64_t, uSrc);
8772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8773
8774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8776 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8777 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8778
8779 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8780 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8781
8782 IEM_MC_ADVANCE_RIP();
8783 IEM_MC_END();
8784 return VINF_SUCCESS;
8785 }
8786 /* The register, register encoding is invalid. */
8787 return IEMOP_RAISE_INVALID_OPCODE();
8788}
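
/* The non-temporal hint of movntq affects only cache behaviour, never
 * architectural state, so emulating it as the plain 64-bit store above is
 * sufficient here. */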
8789
8790/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8791FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8792{
8793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8794 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8795 {
8796 /* Register, memory. */
8797 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8798 IEM_MC_BEGIN(0, 2);
8799 IEM_MC_LOCAL(RTUINT128U, uSrc);
8800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8801
8802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8804 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8805 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8806
8807 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8808 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8809
8810 IEM_MC_ADVANCE_RIP();
8811 IEM_MC_END();
8812 return VINF_SUCCESS;
8813 }
8814
8815 /* The register, register encoding is invalid. */
8816 return IEMOP_RAISE_INVALID_OPCODE();
8817}
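
/* Unlike movntq above, movntdq requires a 16-byte aligned destination;
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE is the store variant that raises #GP(0)
 * on a misaligned effective address. */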
8818
8819/* Opcode 0xf3 0x0f 0xe7 - invalid */
8820/* Opcode 0xf2 0x0f 0xe7 - invalid */
8821
8822
8823/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8824FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8825/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8826FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8827/* Opcode 0xf3 0x0f 0xe8 - invalid */
8828/* Opcode 0xf2 0x0f 0xe8 - invalid */
8829
8830/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8831FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8832/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8833FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8834/* Opcode 0xf3 0x0f 0xe9 - invalid */
8835/* Opcode 0xf2 0x0f 0xe9 - invalid */
8836
8837/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8838FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8839/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8840FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8841/* Opcode 0xf3 0x0f 0xea - invalid */
8842/* Opcode 0xf2 0x0f 0xea - invalid */
8843
8844/** Opcode 0x0f 0xeb - por Pq, Qq */
8845FNIEMOP_STUB(iemOp_por_Pq_Qq);
8846/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8847FNIEMOP_STUB(iemOp_por_Vx_W);
8848/* Opcode 0xf3 0x0f 0xeb - invalid */
8849/* Opcode 0xf2 0x0f 0xeb - invalid */
8850
8851/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8852FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8853/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8854FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8855/* Opcode 0xf3 0x0f 0xec - invalid */
8856/* Opcode 0xf2 0x0f 0xec - invalid */
8857
8858/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8859FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8860/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8861FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8862/* Opcode 0xf3 0x0f 0xed - invalid */
8863/* Opcode 0xf2 0x0f 0xed - invalid */
8864
8865/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8866FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8867/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8868FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8869/* Opcode 0xf3 0x0f 0xee - invalid */
8870/* Opcode 0xf2 0x0f 0xee - invalid */
8871
8872
8873/** Opcode 0x0f 0xef - pxor Pq, Qq */
8874FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8875{
8876 IEMOP_MNEMONIC(pxor, "pxor");
8877 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8878}
8879
8880/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8881FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8882{
8883 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8884 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8885}
8886
8887/* Opcode 0xf3 0x0f 0xef - invalid */
8888/* Opcode 0xf2 0x0f 0xef - invalid */
8889
8890/* Opcode 0x0f 0xf0 - invalid */
8891/* Opcode 0x66 0x0f 0xf0 - invalid */
8892/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8893FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8894
8895/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8896FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8897/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8898FNIEMOP_STUB(iemOp_psllw_Vx_W);
8899/* Opcode 0xf2 0x0f 0xf1 - invalid */
8900
8901/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8902FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8903/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8904FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8905/* Opcode 0xf2 0x0f 0xf2 - invalid */
8906
8907/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8908FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8909/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8910FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8911/* Opcode 0xf2 0x0f 0xf3 - invalid */
8912
8913/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8914FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8915/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8916FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8917/* Opcode 0xf2 0x0f 0xf4 - invalid */
8918
8919/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8920FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8921/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8922FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8923/* Opcode 0xf2 0x0f 0xf5 - invalid */
8924
8925/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8926FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8927/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8928FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8929/* Opcode 0xf2 0x0f 0xf6 - invalid */
8930
8931/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8932FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8933/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8934FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8935/* Opcode 0xf2 0x0f 0xf7 - invalid */
8936
8937/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8938FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8939/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8940FNIEMOP_STUB(iemOp_psubb_Vx_W);
8941/* Opcode 0xf2 0x0f 0xf8 - invalid */
8942
8943/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8944FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8945/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8946FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8947/* Opcode 0xf2 0x0f 0xf9 - invalid */
8948
8949/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8950FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8951/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8952FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8953/* Opcode 0xf2 0x0f 0xfa - invalid */
8954
8955/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8956FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8957/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8958FNIEMOP_STUB(iemOp_psubq_Vx_W);
8959/* Opcode 0xf2 0x0f 0xfb - invalid */
8960
8961/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8962FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8963/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8964FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8965/* Opcode 0xf2 0x0f 0xfc - invalid */
8966
8967/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8968FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8969/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8970FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8971/* Opcode 0xf2 0x0f 0xfd - invalid */
8972
8973/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8974FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8975/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8976FNIEMOP_STUB(iemOp_paddd_Vx_W);
8977/* Opcode 0xf2 0x0f 0xfe - invalid */
8978
8979
8980/** Opcode **** 0x0f 0xff - UD0 */
8981FNIEMOP_DEF(iemOp_ud0)
8982{
8983 IEMOP_MNEMONIC(ud0, "ud0");
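    /* Intel CPUs decode a ModR/M byte (and any displacement) for UD0 before
       raising #UD, while AMD CPUs raise #UD immediately; hence the vendor
       check here. */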
8984 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8985 {
8986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8987#ifndef TST_IEM_CHECK_MC
8988 RTGCPTR GCPtrEff;
8989 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8990 if (rcStrict != VINF_SUCCESS)
8991 return rcStrict;
8992#endif
8993 IEMOP_HLP_DONE_DECODING();
8994 }
8995 return IEMOP_RAISE_INVALID_OPCODE();
8996}
8997
8998
8999
9000/**
9001 * Two byte opcode map, first byte 0x0f.
9002 *
9003 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9004 * check if it needs updating as well when making changes.
9005 */
9006IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9007{
9008 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9009 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9010 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9011 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9012 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9013 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9014 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9015 /* 0x06 */ IEMOP_X4(iemOp_clts),
9016 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9017 /* 0x08 */ IEMOP_X4(iemOp_invd),
9018 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9019 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9020 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9021 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9022 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9023 /* 0x0e */ IEMOP_X4(iemOp_femms),
9024 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9025
9026 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9027 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9028 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9029 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9030 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9031 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9032 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9033 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9034 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9035 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9036 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9037 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9038 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9039 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9040 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9041 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9042
9043 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9044 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9045 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9046 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9047 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9048 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9049 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9050 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9051 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9052 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9053 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9054 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9055 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9056 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9057 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9058 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9059
9060 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9061 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9062 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9063 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9064 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9065 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9066 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9067 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9068 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9069 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9070 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9071 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9072 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9073 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9074 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9075 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9076
9077 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9078 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9079 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9080 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9081 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9082 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9083 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9084 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9085 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9086 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9087 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9088 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9089 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9090 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9091 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9092 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9093
9094 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9095 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9096 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9097 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9098 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9099 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9100 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9101 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9102 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9103 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9104 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9105 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9106 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9107 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9108 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9109 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9110
9111 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9112 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9113 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9114 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9115 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9116 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9117 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9118 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9119 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9120 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9121 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9122 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9123 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9124 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9125 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9126 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
9127
9128 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9129 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9130 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9131 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9132 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9133 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9134 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9135 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9136
9137 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9138 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9139 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9140 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9141 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9142 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9143 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9144 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9145
9146 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9147 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9148 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9149 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9150 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9151 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9152 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9153 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9154 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9155 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9156 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9157 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9158 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9159 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9160 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9161 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9162
9163 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9164 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9165 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9166 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9167 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9168 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9169 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9170 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9171 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9172 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9173 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9174 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9175 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9176 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9177 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9178 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9179
9180 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9181 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9182 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9183 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9184 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9185 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9186 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9187 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9188 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9189 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9190 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9191 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9192 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9193 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9194 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9195 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9196
9197 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9198 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9199 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9200 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9201 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9202 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9203 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9204 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9205 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9206 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9207 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9208 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9209 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9210 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9211 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9212 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9213
9214 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9215 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9216 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9217 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9218 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9219 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9220 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9221 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9222 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9223 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9224 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9225 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9226 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9227 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9228 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9229 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9230
9231 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9232 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9233 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9234 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9235 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9236 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9237 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9238 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9239 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9240 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9241 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9242 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9243 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9244 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9245 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9246 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9247
9248 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9249 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9250 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9251 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9252 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9253 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9254 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9255 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9256 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9257 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9258 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9259 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9260 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9261 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9262 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9263 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9264
9265 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9266 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9267 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9268 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9269 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9270 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9271 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9272 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9273 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9274 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9275 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9276 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9277 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9278 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9279 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9280 /* 0xff */ IEMOP_X4(iemOp_ud0),
9281};
9282AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
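
/* The assertion above pins the layout: 256 opcodes times four prefix columns
 * (none, 066h, 0f3h, 0f2h).  A minimal sketch of the lookup the 0x0f escape
 * handler performs, assuming bOpcode holds the second opcode byte and that
 * pVCpu->iem.s.idxPrefix encodes the active SIMD prefix as 0..3: */
#if 0
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#endif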
9283
9284/** @} */
9285