VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@66743

Last change on this file since 66743 was 66743, checked in by vboxsync, 8 years ago

IEM: Implemented movups Vps,Wps (0x0f 0x10).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 306.7 KB
 
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66743 2017-05-02 10:23:24Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}

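/*
 * A quick refresher on the ModR/M tests used throughout this file: mod sits
 * in bits 7:6 (3 selects the register form, anything else a memory form),
 * reg in bits 5:3 (the /digit opcode extension that indexes g_apfnGroup6
 * above), and rm in bits 2:0.  The helper below is a minimal standalone
 * sketch for illustration only, not a VBox API:
 */
DECLINLINE(void) iemExampleSplitModRm(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;        /* 3: register operand; 0..2: memory operand */
    *pbReg = (bRm >> 3) & 7;  /* the /digit that picks the jump table entry */
    *pbRm  = bRm & 7;         /* register or addressing-mode selector */
}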

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}

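/*
 * For reference, guest code reaches the xgetbv path above with the _xgetbv()
 * intrinsic from immintrin.h (guest-side illustration only, hence not
 * compiled into this file; XCR0 is register index 0 and the OS must have set
 * CR4.OSXSAVE for the instruction not to #UD):
 */
#if 0
# include <immintrin.h>
static uint64_t exampleReadXcr0(void)
{
    return (uint64_t)_xgetbv(0); /* EDX:EAX <- XCR[ECX], here XCR0 */
}
#endif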

/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}

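/*
 * What GCPtrEffSrc points at for lgdt/lidt is the classic x86
 * pseudo-descriptor: a 16-bit limit followed by the table base (24 bits used
 * by the 16-bit form, 32 by the 32-bit form, 64 in long mode, which is why
 * the effective operand size is forced to 64-bit above).  A hypothetical
 * packed layout for the long-mode form, shown for illustration:
 */
#pragma pack(1)
typedef struct IEMEXAMPLEPSEUDODESC
{
    uint16_t cbLimit; /* size of the descriptor table in bytes minus one */
    uint64_t uBase;   /* linear base address (fewer bits in 16/32-bit forms) */
} IEMEXAMPLEPSEUDODESC;
#pragma pack()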

#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}

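/*
 * The OR masks above model how older CPUs reported the machine status word:
 * the 286 implemented only PE/MP/EM/TS (bits 0-3) and returned the remaining
 * bits as ones, the 386 added ET (bit 4), and 486+ return the low word of
 * CR0 unmodified.  A standalone sketch of that behaviour (the uCpu values
 * are illustrative, not IEMTARGETCPU constants):
 */
DECLINLINE(uint16_t) iemExampleLegacySmsw(uint16_t u16Cr0, unsigned uCpu)
{
    if (uCpu <= 286)
        return u16Cr0 | UINT16_C(0xfff0); /* bits 4-15 read as one */
    if (uCpu == 386)
        return u16Cr0 | UINT16_C(0xffe0); /* bits 5-15 read as one */
    return u16Cr0;                        /* 486+: returned as-is */
}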

/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

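/*
 * With mod=3 the rm field above no longer picks a register but a
 * sub-instruction, i.e. the whole ModR/M byte becomes part of the opcode.
 * A sketch of the arithmetic behind the 0xd0/0xd8 style byte values quoted
 * in the worker comments above (helper name is illustrative):
 */
DECLINLINE(uint8_t) iemExampleGrp7ModRmByte(uint8_t iReg, uint8_t iRm)
{
    return (uint8_t)(0xc0 | (iReg << 3) | iRm); /* mod=3 | /digit | sub-op */
}
/* E.g. iemExampleGrp7ModRmByte(2, 0) == 0xd0 (xgetbv) and
   iemExampleGrp7ModRmByte(3, 0) == 0xd8 (vmrun). */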
/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

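/*
 * Every XMM register index above is assembled the same way: three bits from
 * the ModR/M byte OR'ed with uRexReg/uRexB.  Judging from this usage, those
 * decoder fields already hold the REX.R/REX.B bit shifted into bit 3 (an
 * assumption, not verified against the decoder state setup), which is what
 * opens up xmm8-xmm15 in 64-bit mode:
 */
DECLINLINE(uint8_t) iemExampleXmmRegIndex(uint8_t bRm, uint8_t uRexReg)
{
    return (uint8_t)(((bRm >> 3) & 7) | uRexReg); /* uRexReg is 0 or 8 */
}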

/** Opcode 0x66 0x0f 0x10 - movupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

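/*
 * The two movss paths above differ deliberately: the register form writes
 * only the low dword and leaves bits 32-127 of the destination untouched,
 * while the memory form (IEM_MC_STORE_XREG_U32_ZX_U128) zero-extends all the
 * way to bit 127.  A plain-C sketch of that asymmetry, with an array of
 * dwords standing in for an XMM register:
 */
DECLINLINE(void) iemExampleMovssLoad(uint32_t aDst[4], const uint32_t aSrcReg[4], const uint32_t *puSrcMem)
{
    if (puSrcMem)   /* memory operand: load and zero-extend */
    {
        aDst[0] = *puSrcMem;
        aDst[1] = aDst[2] = aDst[3] = 0;
    }
    else            /* register operand: merge the low dword only */
        aDst[0] = aSrcReg[0];
}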

/** Opcode 0xf2 0x0f 0x10 - movsd Vx, Wsd */
FNIEMOP_STUB(iemOp_movsd_Vx_Wsd);


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

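/*
 * movsldup duplicates the even dwords of the source, as the @optest above
 * encodes (0xdddddddd00000002eeeeeeee00000001 becomes
 * 0x00000002000000020000000100000001).  A scalar equivalent for reference;
 * movddup below does the analogous thing with the low qword:
 */
DECLINLINE(void) iemExampleMovSlDup(uint32_t aDst[4], const uint32_t aSrc[4])
{
    aDst[0] = aSrc[0];
    aDst[1] = aSrc[0];
    aDst[2] = aSrc[2];
    aDst[3] = aSrc[2];
}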

/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x13 - movlps Mq, Vq */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

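/*
 * The whole 0x0f 0x19..0x1f row decodes as NOP/hint space; 0x0f 0x1f /0 in
 * particular is the recommended multi-byte NOP.  For reference, the
 * canonical 6-byte form (illustrative bytes, following the usual encoding
 * rules: operand-size prefix, 0f 1f, ModR/M with SIB and disp8):
 */
static const uint8_t g_abIemExampleNop6[] = { 0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00 }; /* nopw 0x0(%rax,%rax,1) */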

/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}

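/*
 * The lock-prefix special case above is the AMD alternative encoding of CR8
 * for 32-bit code (gated by fMovCr8In32Bit), e.g. these illustrative bytes:
 *     f0 0f 20 c0    lock mov eax, cr0 ; reads CR8 instead on such CPUs
 * which is why iCrReg gets OR'ed with 8.
 */
static const uint8_t g_abIemExampleLockMovEaxCr8[] = { 0xf0, 0x0f, 0x20, 0xc0 };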

/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand-size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - movaps Vps, Wps */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29 - movaps Wps, Vps */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */
1977
1978
1979/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
1980FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
1981/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
1982FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
1983/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
1984FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
1985/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
1986FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
1987
1988
1989/** Opcode 0x0f 0x2b - movntps Mps, Vps */
1990FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
1991{
1992 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1993 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1994 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1995 {
1996 /*
1997 * Memory, register.
1998 */
1999 IEM_MC_BEGIN(0, 2);
2000 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2002
2003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2005 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2007
2008 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 /* The register, register encoding is invalid. */
2015 else
2016 return IEMOP_RAISE_INVALID_OPCODE();
2017 return VINF_SUCCESS;
2018}
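/* Note on the movnt* family: the non-temporal hint only affects cacheability,
   so the emulation above may treat it as an ordinary aligned 128-bit store;
   the register form has no defined encoding, hence the #UD in the else
   branch. */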
2019
2020/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2021FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2022{
2023 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2025 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2026 {
2027 /*
2028 * Memory, register.
2029 */
2030 IEM_MC_BEGIN(0, 2);
2031 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2033
2034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2037 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2038
2039 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2040 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2041
2042 IEM_MC_ADVANCE_RIP();
2043 IEM_MC_END();
2044 }
2045 /* The register, register encoding is invalid. */
2046 else
2047 return IEMOP_RAISE_INVALID_OPCODE();
2048 return VINF_SUCCESS;
2049}
2050/* Opcode 0xf3 0x0f 0x2b - invalid */
2051/* Opcode 0xf2 0x0f 0x2b - invalid */
2052
2053
2054/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2055FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2056/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2057FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2058/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2059FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2060/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2061FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2062
2063/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2064FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2065/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2066FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2067/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2068FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2069/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2070FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2071
2072/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2073FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2074/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2075FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2076/* Opcode 0xf3 0x0f 0x2e - invalid */
2077/* Opcode 0xf2 0x0f 0x2e - invalid */
2078
2079/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2080FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2081/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2082FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2083/* Opcode 0xf3 0x0f 0x2f - invalid */
2084/* Opcode 0xf2 0x0f 0x2f - invalid */
2085
2086/** Opcode 0x0f 0x30. */
2087FNIEMOP_DEF(iemOp_wrmsr)
2088{
2089 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2091 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2092}
2093
2094
2095/** Opcode 0x0f 0x31. */
2096FNIEMOP_DEF(iemOp_rdtsc)
2097{
2098 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2100 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2101}
2102
2103
2104/** Opcode 0x0f 0x32. */
2105FNIEMOP_DEF(iemOp_rdmsr)
2106{
2107 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2109 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2110}
2111
2112
2113/** Opcode 0x0f 0x33. */
2114FNIEMOP_DEF(iemOp_rdpmc)
2115{
2116 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2118 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2119}
2120
2121
2122/** Opcode 0x0f 0x34. */
2123FNIEMOP_STUB(iemOp_sysenter);
2124/** Opcode 0x0f 0x35. */
2125FNIEMOP_STUB(iemOp_sysexit);
2126/** Opcode 0x0f 0x37. */
2127FNIEMOP_STUB(iemOp_getsec);
2128
2129
2130/** Opcode 0x0f 0x38. */
2131FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2132{
2133#ifdef IEM_WITH_THREE_0F_38
2134 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2135 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2136#else
2137 IEMOP_BITCH_ABOUT_STUB();
2138 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2139#endif
2140}
2141
2142
2143/** Opcode 0x0f 0x3a. */
2144FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2145{
2146#ifdef IEM_WITH_THREE_0F_3A
2147 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2148 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2149#else
2150 IEMOP_BITCH_ABOUT_STUB();
2151 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2152#endif
2153}
2154
2155
2156/**
2157 * Implements a conditional move.
2158 *
2159 * Wish there was an obvious way to do this where we could share and reduce
2160 * code bloat.
2161 *
2162 * @param a_Cnd The conditional "microcode" operation.
2163 */
2164#define CMOV_X(a_Cnd) \
2165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2166 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2167 { \
2168 switch (pVCpu->iem.s.enmEffOpSize) \
2169 { \
2170 case IEMMODE_16BIT: \
2171 IEM_MC_BEGIN(0, 1); \
2172 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2173 a_Cnd { \
2174 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2175 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2176 } IEM_MC_ENDIF(); \
2177 IEM_MC_ADVANCE_RIP(); \
2178 IEM_MC_END(); \
2179 return VINF_SUCCESS; \
2180 \
2181 case IEMMODE_32BIT: \
2182 IEM_MC_BEGIN(0, 1); \
2183 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2184 a_Cnd { \
2185 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2186 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2187 } IEM_MC_ELSE() { \
2188 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2189 } IEM_MC_ENDIF(); \
2190 IEM_MC_ADVANCE_RIP(); \
2191 IEM_MC_END(); \
2192 return VINF_SUCCESS; \
2193 \
2194 case IEMMODE_64BIT: \
2195 IEM_MC_BEGIN(0, 1); \
2196 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2197 a_Cnd { \
2198 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2199 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2200 } IEM_MC_ENDIF(); \
2201 IEM_MC_ADVANCE_RIP(); \
2202 IEM_MC_END(); \
2203 return VINF_SUCCESS; \
2204 \
2205 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2206 } \
2207 } \
2208 else \
2209 { \
2210 switch (pVCpu->iem.s.enmEffOpSize) \
2211 { \
2212 case IEMMODE_16BIT: \
2213 IEM_MC_BEGIN(0, 2); \
2214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2215 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2217 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2218 a_Cnd { \
2219 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2220 } IEM_MC_ENDIF(); \
2221 IEM_MC_ADVANCE_RIP(); \
2222 IEM_MC_END(); \
2223 return VINF_SUCCESS; \
2224 \
2225 case IEMMODE_32BIT: \
2226 IEM_MC_BEGIN(0, 2); \
2227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2228 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2230 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2231 a_Cnd { \
2232 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2233 } IEM_MC_ELSE() { \
2234 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2235 } IEM_MC_ENDIF(); \
2236 IEM_MC_ADVANCE_RIP(); \
2237 IEM_MC_END(); \
2238 return VINF_SUCCESS; \
2239 \
2240 case IEMMODE_64BIT: \
2241 IEM_MC_BEGIN(0, 2); \
2242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2243 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2245 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2246 a_Cnd { \
2247 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2248 } IEM_MC_ENDIF(); \
2249 IEM_MC_ADVANCE_RIP(); \
2250 IEM_MC_END(); \
2251 return VINF_SUCCESS; \
2252 \
2253 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2254 } \
2255 } do {} while (0)
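/* A point worth spelling out in the 32-bit cases above: in 64-bit mode a
   32-bit CMOVcc writes the destination register even when the condition is
   false, clearing bits 63:32 (hence the IEM_MC_CLEAR_HIGH_GREG_U64 in the
   ELSE branches). Plain C sketch with hypothetical names, not part of the
   build: */
#if 0
#include <stdbool.h>
#include <stdint.h>
static uint64_t sketchCmov32(uint64_t u64Dst, uint32_t u32Src, bool fCond)
{
    if (fCond)
        return u32Src;              /* move + implicit zero-extension */
    return (uint32_t)u64Dst;        /* no move, but bits 63:32 still cleared */
}
#endif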
2256
2257
2258
2259/** Opcode 0x0f 0x40. */
2260FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2261{
2262 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2263 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2264}
2265
2266
2267/** Opcode 0x0f 0x41. */
2268FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2269{
2270 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2271 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2272}
2273
2274
2275/** Opcode 0x0f 0x42. */
2276FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2277{
2278 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2279 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2280}
2281
2282
2283/** Opcode 0x0f 0x43. */
2284FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2285{
2286 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2287 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2288}
2289
2290
2291/** Opcode 0x0f 0x44. */
2292FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2293{
2294 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2295 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2296}
2297
2298
2299/** Opcode 0x0f 0x45. */
2300FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2301{
2302 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2303 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2304}
2305
2306
2307/** Opcode 0x0f 0x46. */
2308FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2309{
2310 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2311 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2312}
2313
2314
2315/** Opcode 0x0f 0x47. */
2316FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2317{
2318 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2319 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2320}
2321
2322
2323/** Opcode 0x0f 0x48. */
2324FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2325{
2326 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2327 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2328}
2329
2330
2331/** Opcode 0x0f 0x49. */
2332FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2333{
2334 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2335 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2336}
2337
2338
2339/** Opcode 0x0f 0x4a. */
2340FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2341{
2342 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2343 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2344}
2345
2346
2347/** Opcode 0x0f 0x4b. */
2348FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2349{
2350 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2351 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2352}
2353
2354
2355/** Opcode 0x0f 0x4c. */
2356FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2357{
2358 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2359 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2360}
2361
2362
2363/** Opcode 0x0f 0x4d. */
2364FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2365{
2366 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2367 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2368}
2369
2370
2371/** Opcode 0x0f 0x4e. */
2372FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2373{
2374 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2375 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2376}
2377
2378
2379/** Opcode 0x0f 0x4f. */
2380FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2381{
2382 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2383 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2384}
2385
2386#undef CMOV_X
2387
2388/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2389FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2390/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2391FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2392/* Opcode 0xf3 0x0f 0x50 - invalid */
2393/* Opcode 0xf2 0x0f 0x50 - invalid */
2394
2395/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2396FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2397/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2398FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2399/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2400FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2401/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2402FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2403
2404/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2405FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2406/* Opcode 0x66 0x0f 0x52 - invalid */
2407/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2408FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2409/* Opcode 0xf2 0x0f 0x52 - invalid */
2410
2411/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2412FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2413/* Opcode 0x66 0x0f 0x53 - invalid */
2414/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2415FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2416/* Opcode 0xf2 0x0f 0x53 - invalid */
2417
2418/** Opcode 0x0f 0x54 - andps Vps, Wps */
2419FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2420/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2421FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2422/* Opcode 0xf3 0x0f 0x54 - invalid */
2423/* Opcode 0xf2 0x0f 0x54 - invalid */
2424
2425/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2426FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2427/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2428FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2429/* Opcode 0xf3 0x0f 0x55 - invalid */
2430/* Opcode 0xf2 0x0f 0x55 - invalid */
2431
2432/** Opcode 0x0f 0x56 - orps Vps, Wps */
2433FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2434/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2435FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2436/* Opcode 0xf3 0x0f 0x56 - invalid */
2437/* Opcode 0xf2 0x0f 0x56 - invalid */
2438
2439/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2440FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2441/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2442FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2443/* Opcode 0xf3 0x0f 0x57 - invalid */
2444/* Opcode 0xf2 0x0f 0x57 - invalid */
2445
2446/** Opcode 0x0f 0x58 - addps Vps, Wps */
2447FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2448/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2449FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2450/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2451FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2452/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2453FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2454
2455/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2456FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2457/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2458FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2459/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2460FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2461/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2462FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2463
2464/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2465FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2466/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2467FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2468/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2469FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2470/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2471FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2472
2473/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2474FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2475/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2476FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2477/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2478FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2479/* Opcode 0xf2 0x0f 0x5b - invalid */
2480
2481/** Opcode 0x0f 0x5c - subps Vps, Wps */
2482FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2483/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2484FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2485/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2486FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2487/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2488FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2489
2490/** Opcode 0x0f 0x5d - minps Vps, Wps */
2491FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2492/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2493FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2494/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2495FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2496/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2497FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2498
2499/** Opcode 0x0f 0x5e - divps Vps, Wps */
2500FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2501/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2502FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2503/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2504FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2505/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2506FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2507
2508/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2509FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2510/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2511FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2512/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2513FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2514/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2515FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2516
2517/**
2518 * Common worker for MMX instructions on the forms:
2519 * pxxxx mm1, mm2/mem32
2520 *
2521 * The 2nd operand is the first half of a register, which in the memory case
2522 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
2523 * 128-bit memory access for SSE.
2524 *
2525 * Exceptions type 4.
2526 */
2527FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2528{
2529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2530 if (!pImpl->pfnU64)
2531 return IEMOP_RAISE_INVALID_OPCODE();
2532 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2533 {
2534 /*
2535 * Register, register.
2536 */
2537 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2538 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2540 IEM_MC_BEGIN(2, 0);
2541 IEM_MC_ARG(uint64_t *, pDst, 0);
2542 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2543 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2544 IEM_MC_PREPARE_FPU_USAGE();
2545 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2546 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2547 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2548 IEM_MC_ADVANCE_RIP();
2549 IEM_MC_END();
2550 }
2551 else
2552 {
2553 /*
2554 * Register, memory.
2555 */
2556 IEM_MC_BEGIN(2, 2);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_LOCAL(uint32_t, uSrc);
2559 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561
2562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2564 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2565 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2566
2567 IEM_MC_PREPARE_FPU_USAGE();
2568 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2569 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2570
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 return VINF_SUCCESS;
2575}
2576
2577
2578/**
2579 * Common worker for SSE2 instructions on the forms:
2580 * pxxxx xmm1, xmm2/mem128
2581 *
2582 * The 2nd operand is the first half of a register, which in the memory case
2583 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
2584 * 128-bit memory access for SSE.
2585 *
2586 * Exceptions type 4.
2587 */
2588FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2589{
2590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2591 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2592 {
2593 /*
2594 * Register, register.
2595 */
2596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2597 IEM_MC_BEGIN(2, 0);
2598 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2599 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2600 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2601 IEM_MC_PREPARE_SSE_USAGE();
2602 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2603 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2604 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2605 IEM_MC_ADVANCE_RIP();
2606 IEM_MC_END();
2607 }
2608 else
2609 {
2610 /*
2611 * Register, memory.
2612 */
2613 IEM_MC_BEGIN(2, 2);
2614 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2615 IEM_MC_LOCAL(uint64_t, uSrc);
2616 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2618
2619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2621 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2622 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2623
2624 IEM_MC_PREPARE_SSE_USAGE();
2625 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2626 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2627
2628 IEM_MC_ADVANCE_RIP();
2629 IEM_MC_END();
2630 }
2631 return VINF_SUCCESS;
2632}
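/* "LowLow" in plain C terms: both operands contribute their LOWER half,
   interleaved into the full destination. punpcklbw on 64-bit MMX operands as
   a standalone sketch (hypothetical helper, not part of the build): */
#if 0
#include <stdint.h>
static uint64_t sketchPunpcklbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);      /* dst bytes 0..3 */
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);  /* src bytes 0..3 */
    }
    return uResult;
}
#endif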
2633
2634
2635/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2636FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2637{
2638 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2639 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2640}
2641
2642/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2643FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2644{
2645 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2646 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2647}
2648
2649/* Opcode 0xf3 0x0f 0x60 - invalid */
2650
2651
2652/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2653FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2654{
2655 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2656 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2657}
2658
2659/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2660FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2661{
2662 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2663 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2664}
2665
2666/* Opcode 0xf3 0x0f 0x61 - invalid */
2667
2668
2669/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2670FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2671{
2672 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2673 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2674}
2675
2676/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2677FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2678{
2679 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2680 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2681}
2682
2683/* Opcode 0xf3 0x0f 0x62 - invalid */
2684
2685
2686
2687/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2688FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2689/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2690FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2691/* Opcode 0xf3 0x0f 0x63 - invalid */
2692
2693/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2694FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2695/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2696FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2697/* Opcode 0xf3 0x0f 0x64 - invalid */
2698
2699/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2700FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2701/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2702FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2703/* Opcode 0xf3 0x0f 0x65 - invalid */
2704
2705/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2706FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2707/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2708FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2709/* Opcode 0xf3 0x0f 0x66 - invalid */
2710
2711/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2712FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2713/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
2714FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2715/* Opcode 0xf3 0x0f 0x67 - invalid */
2716
2717
2718/**
2719 * Common worker for MMX instructions on the form:
2720 * pxxxx mm1, mm2/mem64
2721 *
2722 * The 2nd operand is the second half of a register, which in the memory case
2723 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2724 * where it may read the full 128 bits or only the upper 64 bits.
2725 *
2726 * Exceptions type 4.
2727 */
2728FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2729{
2730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2731 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2733 {
2734 /*
2735 * Register, register.
2736 */
2737 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2738 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2740 IEM_MC_BEGIN(2, 0);
2741 IEM_MC_ARG(uint64_t *, pDst, 0);
2742 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2743 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2744 IEM_MC_PREPARE_FPU_USAGE();
2745 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2746 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2747 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2748 IEM_MC_ADVANCE_RIP();
2749 IEM_MC_END();
2750 }
2751 else
2752 {
2753 /*
2754 * Register, memory.
2755 */
2756 IEM_MC_BEGIN(2, 2);
2757 IEM_MC_ARG(uint64_t *, pDst, 0);
2758 IEM_MC_LOCAL(uint64_t, uSrc);
2759 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2761
2762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2764 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2765 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2766
2767 IEM_MC_PREPARE_FPU_USAGE();
2768 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2769 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2770
2771 IEM_MC_ADVANCE_RIP();
2772 IEM_MC_END();
2773 }
2774 return VINF_SUCCESS;
2775}
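/* "HighHigh" in plain C terms: both operands contribute their UPPER half,
   interleaved into the full destination. punpckhbw on 64-bit MMX operands as
   a standalone sketch (hypothetical helper, not part of the build): */
#if 0
#include <stdint.h>
static uint64_t sketchPunpckhbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> ((i + 4) * 8)) & 0xff) << (i * 16);      /* dst bytes 4..7 */
        uResult |= ((uSrc >> ((i + 4) * 8)) & 0xff) << (i * 16 + 8);  /* src bytes 4..7 */
    }
    return uResult;
}
#endif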
2776
2777
2778/**
2779 * Common worker for SSE2 instructions on the form:
2780 * pxxxx xmm1, xmm2/mem128
2781 *
2782 * The 2nd operand is the second half of a register, which in the memory case
2783 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2784 * where it may read the full 128 bits or only the upper 64 bits.
2785 *
2786 * Exceptions type 4.
2787 */
2788FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2789{
2790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2791 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2792 {
2793 /*
2794 * Register, register.
2795 */
2796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2797 IEM_MC_BEGIN(2, 0);
2798 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2799 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2800 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2801 IEM_MC_PREPARE_SSE_USAGE();
2802 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2803 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2804 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2805 IEM_MC_ADVANCE_RIP();
2806 IEM_MC_END();
2807 }
2808 else
2809 {
2810 /*
2811 * Register, memory.
2812 */
2813 IEM_MC_BEGIN(2, 2);
2814 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2815 IEM_MC_LOCAL(RTUINT128U, uSrc);
2816 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2818
2819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2821 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2822 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2823
2824 IEM_MC_PREPARE_SSE_USAGE();
2825 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2826 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2827
2828 IEM_MC_ADVANCE_RIP();
2829 IEM_MC_END();
2830 }
2831 return VINF_SUCCESS;
2832}
2833
2834
2835/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2836FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2837{
2838 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2839 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2840}
2841
2842/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2843FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2844{
2845 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
2846 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2847}
2848/* Opcode 0xf3 0x0f 0x68 - invalid */
2849
2850
2851/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2852FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2853{
2854 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2855 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2856}
2857
2858/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
2859FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
2860{
2861 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
2862 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2863}
2864
2865/* Opcode 0xf3 0x0f 0x69 - invalid */
2866
2867
2868/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2869FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2870{
2871 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2872 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2873}
2874
2875/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
2876FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
2877{
2878 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, Wx");
2879 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2880}
2881/* Opcode 0xf3 0x0f 0x6a - invalid */
2882
2883
2884/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2885FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2886/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
2887FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
2888/* Opcode 0xf3 0x0f 0x6b - invalid */
2889
2890
2891/* Opcode 0x0f 0x6c - invalid */
2892
2893/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
2894FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
2895{
2896 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
2897 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2898}
2899
2900/* Opcode 0xf3 0x0f 0x6c - invalid */
2901/* Opcode 0xf2 0x0f 0x6c - invalid */
2902
2903
2904/* Opcode 0x0f 0x6d - invalid */
2905
2906/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
2907FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
2908{
2909 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,Wx");
2910 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2911}
2912
2913/* Opcode 0xf3 0x0f 0x6d - invalid */
2914
2915
2916/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
2917FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
2918{
2919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2920 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2921 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2922 else
2923 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2925 {
2926 /* MMX, greg */
2927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2928 IEM_MC_BEGIN(0, 1);
2929 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2930 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2931 IEM_MC_LOCAL(uint64_t, u64Tmp);
2932 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2933 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2934 else
2935 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2936 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2937 IEM_MC_ADVANCE_RIP();
2938 IEM_MC_END();
2939 }
2940 else
2941 {
2942 /* MMX, [mem] */
2943 IEM_MC_BEGIN(0, 2);
2944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2945 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2948 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2949 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2950 {
2951 IEM_MC_LOCAL(uint64_t, u64Tmp);
2952 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2953 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2954 }
2955 else
2956 {
2957 IEM_MC_LOCAL(uint32_t, u32Tmp);
2958 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2959 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2960 }
2961 IEM_MC_ADVANCE_RIP();
2962 IEM_MC_END();
2963 }
2964 return VINF_SUCCESS;
2965}
2966
2967/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
2968FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
2969{
2970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2972 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Vq,Eq");
2973 else
2974 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Vd,Ed");
2975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2976 {
2977 /* XMM, greg*/
2978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2979 IEM_MC_BEGIN(0, 1);
2980 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2981 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2982 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2983 {
2984 IEM_MC_LOCAL(uint64_t, u64Tmp);
2985 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2986 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2987 }
2988 else
2989 {
2990 IEM_MC_LOCAL(uint32_t, u32Tmp);
2991 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2992 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2993 }
2994 IEM_MC_ADVANCE_RIP();
2995 IEM_MC_END();
2996 }
2997 else
2998 {
2999 /* XMM, [mem] */
3000 IEM_MC_BEGIN(0, 2);
3001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3002 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3006 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3007 {
3008 IEM_MC_LOCAL(uint64_t, u64Tmp);
3009 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3010 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3011 }
3012 else
3013 {
3014 IEM_MC_LOCAL(uint32_t, u32Tmp);
3015 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3016 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3017 }
3018 IEM_MC_ADVANCE_RIP();
3019 IEM_MC_END();
3020 }
3021 return VINF_SUCCESS;
3022}
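/* What the XMM paths above store, reduced to plain C: the 32-bit (or, with
   REX.W, 64-bit) source lands in the low lane and the rest of the XMM
   register is zeroed. Sketch with a hypothetical 128-bit type, not part of
   the build: */
#if 0
#include <stdint.h>
typedef struct { uint64_t au64[2]; } SKETCHU128;
static SKETCHU128 sketchMovdVyEy(uint32_t u32Src)
{
    SKETCHU128 uDst;
    uDst.au64[0] = u32Src;  /* zero-extends into the low qword */
    uDst.au64[1] = 0;       /* high qword cleared */
    return uDst;
}
#endif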
3023
3024/* Opcode 0xf3 0x0f 0x6e - invalid */
3025
3026
3027/** Opcode 0x0f 0x6f - movq Pq, Qq */
3028FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3029{
3030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3031 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3032 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3033 {
3034 /*
3035 * Register, register.
3036 */
3037 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3038 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3040 IEM_MC_BEGIN(0, 1);
3041 IEM_MC_LOCAL(uint64_t, u64Tmp);
3042 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3043 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3044 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3045 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3046 IEM_MC_ADVANCE_RIP();
3047 IEM_MC_END();
3048 }
3049 else
3050 {
3051 /*
3052 * Register, memory.
3053 */
3054 IEM_MC_BEGIN(0, 2);
3055 IEM_MC_LOCAL(uint64_t, u64Tmp);
3056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3057
3058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3060 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3061 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3062 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3063 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3064
3065 IEM_MC_ADVANCE_RIP();
3066 IEM_MC_END();
3067 }
3068 return VINF_SUCCESS;
3069}
3070
3071/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3072FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3073{
3074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3075 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3077 {
3078 /*
3079 * Register, register.
3080 */
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_BEGIN(0, 0);
3083 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3085 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3086 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3087 IEM_MC_ADVANCE_RIP();
3088 IEM_MC_END();
3089 }
3090 else
3091 {
3092 /*
3093 * Register, memory.
3094 */
3095 IEM_MC_BEGIN(0, 2);
3096 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3098
3099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3101 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3102 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3103 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3104 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3105
3106 IEM_MC_ADVANCE_RIP();
3107 IEM_MC_END();
3108 }
3109 return VINF_SUCCESS;
3110}
3111
3112/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3113FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3114{
3115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3116 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3118 {
3119 /*
3120 * Register, register.
3121 */
3122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3123 IEM_MC_BEGIN(0, 0);
3124 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3126 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3127 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3128 IEM_MC_ADVANCE_RIP();
3129 IEM_MC_END();
3130 }
3131 else
3132 {
3133 /*
3134 * Register, memory.
3135 */
3136 IEM_MC_BEGIN(0, 2);
3137 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3139
3140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3142 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3144 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3145 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3146
3147 IEM_MC_ADVANCE_RIP();
3148 IEM_MC_END();
3149 }
3150 return VINF_SUCCESS;
3151}
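/* The only functional difference between the movdqa and movdqu workers above
   is the memory fetch: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces 16-byte
   alignment, the plain IEM_MC_FETCH_MEM_U128 does not. The alignment test
   amounts to the following sketch; on legacy SSE a failed test yields #GP(0)
   (modulo alignment-check/AC details), hypothetical helper, not built: */
#if 0
#include <stdint.h>
static int sketchIsSseAligned(uint64_t GCPtrMem)
{
    return (GCPtrMem & 15) == 0;    /* false -> movdqa raises #GP(0) */
}
#endif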
3152
3153
3154/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3155FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3156{
3157 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3160 {
3161 /*
3162 * Register, register.
3163 */
3164 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3166
3167 IEM_MC_BEGIN(3, 0);
3168 IEM_MC_ARG(uint64_t *, pDst, 0);
3169 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3170 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3171 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3172 IEM_MC_PREPARE_FPU_USAGE();
3173 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3174 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3175 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3176 IEM_MC_ADVANCE_RIP();
3177 IEM_MC_END();
3178 }
3179 else
3180 {
3181 /*
3182 * Register, memory.
3183 */
3184 IEM_MC_BEGIN(3, 2);
3185 IEM_MC_ARG(uint64_t *, pDst, 0);
3186 IEM_MC_LOCAL(uint64_t, uSrc);
3187 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3189
3190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3191 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3192 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3194 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3195
3196 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3197 IEM_MC_PREPARE_FPU_USAGE();
3198 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3199 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3200
3201 IEM_MC_ADVANCE_RIP();
3202 IEM_MC_END();
3203 }
3204 return VINF_SUCCESS;
3205}
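/* What PSHUFW computes, as a standalone reference sketch: each of the four
   destination words is selected from the source by two immediate bits
   (hypothetical helper, not part of the build): */
#if 0
#include <stdint.h>
static uint64_t sketchPshufw(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bImm >> (i * 2)) & 3;              /* selector for word i */
        uResult |= ((uSrc >> (iSel * 16)) & 0xffff) << (i * 16);  /* place selected word */
    }
    return uResult;
}
#endif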
3206
3207/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3208FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3209{
3210 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3213 {
3214 /*
3215 * Register, register.
3216 */
3217 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3219
3220 IEM_MC_BEGIN(3, 0);
3221 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3222 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3223 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3224 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3225 IEM_MC_PREPARE_SSE_USAGE();
3226 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3227 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3228 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3229 IEM_MC_ADVANCE_RIP();
3230 IEM_MC_END();
3231 }
3232 else
3233 {
3234 /*
3235 * Register, memory.
3236 */
3237 IEM_MC_BEGIN(3, 2);
3238 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3239 IEM_MC_LOCAL(RTUINT128U, uSrc);
3240 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3242
3243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3244 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3245 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3247 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3248
3249 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3250 IEM_MC_PREPARE_SSE_USAGE();
3251 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3252 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3253
3254 IEM_MC_ADVANCE_RIP();
3255 IEM_MC_END();
3256 }
3257 return VINF_SUCCESS;
3258}
3259
3260/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3261FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3262{
3263 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3265 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3266 {
3267 /*
3268 * Register, register.
3269 */
3270 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3272
3273 IEM_MC_BEGIN(3, 0);
3274 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3275 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3276 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3277 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3278 IEM_MC_PREPARE_SSE_USAGE();
3279 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3280 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3281 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3282 IEM_MC_ADVANCE_RIP();
3283 IEM_MC_END();
3284 }
3285 else
3286 {
3287 /*
3288 * Register, memory.
3289 */
3290 IEM_MC_BEGIN(3, 2);
3291 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3292 IEM_MC_LOCAL(RTUINT128U, uSrc);
3293 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3295
3296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3297 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3298 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3301
3302 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3303 IEM_MC_PREPARE_SSE_USAGE();
3304 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3305 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
3312
3313/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3314FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3315{
3316 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3319 {
3320 /*
3321 * Register, register.
3322 */
3323 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3325
3326 IEM_MC_BEGIN(3, 0);
3327 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3328 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3329 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3330 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3331 IEM_MC_PREPARE_SSE_USAGE();
3332 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3333 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3334 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3335 IEM_MC_ADVANCE_RIP();
3336 IEM_MC_END();
3337 }
3338 else
3339 {
3340 /*
3341 * Register, memory.
3342 */
3343 IEM_MC_BEGIN(3, 2);
3344 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3345 IEM_MC_LOCAL(RTUINT128U, uSrc);
3346 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3348
3349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3350 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3351 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3353 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3354
3355 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3356 IEM_MC_PREPARE_SSE_USAGE();
3357 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3358 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3359
3360 IEM_MC_ADVANCE_RIP();
3361 IEM_MC_END();
3362 }
3363 return VINF_SUCCESS;
3364}
3365
3366
3367/** Opcode 0x0f 0x71 11/2. */
3368FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3369
3370/** Opcode 0x66 0x0f 0x71 11/2. */
3371FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3372
3373/** Opcode 0x0f 0x71 11/4. */
3374FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3375
3376/** Opcode 0x66 0x0f 0x71 11/4. */
3377FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3378
3379/** Opcode 0x0f 0x71 11/6. */
3380FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3381
3382/** Opcode 0x66 0x0f 0x71 11/6. */
3383FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3384
3385
3386/**
3387 * Group 12 jump table for register variant.
3388 */
3389IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3390{
3391 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3392 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3393 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3394 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3395 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3396 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3397 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3398 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3399};
3400AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3401
3402
3403/** Opcode 0x0f 0x71. */
3404FNIEMOP_DEF(iemOp_Grp12)
3405{
3406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3407 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3408 /* register, register */
3409 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3410 + pVCpu->iem.s.idxPrefix], bRm);
3411 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3412}
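/* How the group tables above are indexed: four entries per /r value, one per
   SIMD prefix, so the row is ModRM.reg * 4 and idxPrefix picks the column
   (0 = none, 1 = 0x66; the remaining two columns would be 0xF3/0xF2, all
   invalid here - the exact idxPrefix encoding is an assumption based on the
   column order of the tables). Worked example for 0x66 0x0F 0x71 /2: */
#if 0
unsigned const idx = /*reg=*/2 * 4 + /*idxPrefix 0x66=*/1;  /* -> iemOp_Grp12_psrlw_Ux_Ib */
#endif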
3413
3414
3415/** Opcode 0x0f 0x72 11/2. */
3416FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3417
3418/** Opcode 0x66 0x0f 0x72 11/2. */
3419FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3420
3421/** Opcode 0x0f 0x72 11/4. */
3422FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3423
3424/** Opcode 0x66 0x0f 0x72 11/4. */
3425FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3426
3427/** Opcode 0x0f 0x72 11/6. */
3428FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3429
3430/** Opcode 0x66 0x0f 0x72 11/6. */
3431FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3432
3433
3434/**
3435 * Group 13 jump table for register variant.
3436 */
3437IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3438{
3439 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3440 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3441 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3442 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3443 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3444 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3445 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3446 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3447};
3448AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3449
3450/** Opcode 0x0f 0x72. */
3451FNIEMOP_DEF(iemOp_Grp13)
3452{
3453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3454 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3455 /* register, register */
3456 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3457 + pVCpu->iem.s.idxPrefix], bRm);
3458 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3459}
3460
3461
3462/** Opcode 0x0f 0x73 11/2. */
3463FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3464
3465/** Opcode 0x66 0x0f 0x73 11/2. */
3466FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3467
3468/** Opcode 0x66 0x0f 0x73 11/3. */
3469FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3470
3471/** Opcode 0x0f 0x73 11/6. */
3472FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3473
3474/** Opcode 0x66 0x0f 0x73 11/6. */
3475FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3476
3477/** Opcode 0x66 0x0f 0x73 11/7. */
3478FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3479
3480/**
3481 * Group 14 jump table for register variant.
3482 */
3483IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3484{
3485 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3486 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3487 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3488 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3489 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3490 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3491 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3492 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3493};
3494AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3495
3496
3497/** Opcode 0x0f 0x73. */
3498FNIEMOP_DEF(iemOp_Grp14)
3499{
3500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3502 /* register, register */
3503 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3504 + pVCpu->iem.s.idxPrefix], bRm);
3505 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3506}
3507
3508
3509/**
3510 * Common worker for MMX instructions on the form:
3511 * pxxx mm1, mm2/mem64
3512 */
3513FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3514{
3515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3517 {
3518 /*
3519 * Register, register.
3520 */
3521 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3522 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524 IEM_MC_BEGIN(2, 0);
3525 IEM_MC_ARG(uint64_t *, pDst, 0);
3526 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3527 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3528 IEM_MC_PREPARE_FPU_USAGE();
3529 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3530 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3531 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3532 IEM_MC_ADVANCE_RIP();
3533 IEM_MC_END();
3534 }
3535 else
3536 {
3537 /*
3538 * Register, memory.
3539 */
3540 IEM_MC_BEGIN(2, 2);
3541 IEM_MC_ARG(uint64_t *, pDst, 0);
3542 IEM_MC_LOCAL(uint64_t, uSrc);
3543 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3545
3546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3548 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3549 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3550
3551 IEM_MC_PREPARE_FPU_USAGE();
3552 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3553 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3554
3555 IEM_MC_ADVANCE_RIP();
3556 IEM_MC_END();
3557 }
3558 return VINF_SUCCESS;
3559}
3560
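/* A note on the worker above: the MMX registers architecturally alias the
 * low 64 bits of the x87 ST registers, which is why the operands are taken
 * by reference out of the FPU state and IEM_MC_PREPARE_FPU_USAGE runs before
 * the register pointers are fetched. */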
3561
3562/**
3563 * Common worker for SSE2 instructions of the form:
3564 * pxxx xmm1, xmm2/mem128
3565 *
3566 * Proper alignment of the 128-bit operand is enforced.
3567 * Exceptions type 4. SSE2 cpuid checks.
3568 */
3569FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3570{
3571 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3573 {
3574 /*
3575 * Register, register.
3576 */
3577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3578 IEM_MC_BEGIN(2, 0);
3579 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3580 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3581 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3582 IEM_MC_PREPARE_SSE_USAGE();
3583 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3584 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3585 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3586 IEM_MC_ADVANCE_RIP();
3587 IEM_MC_END();
3588 }
3589 else
3590 {
3591 /*
3592 * Register, memory.
3593 */
3594 IEM_MC_BEGIN(2, 2);
3595 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3596 IEM_MC_LOCAL(RTUINT128U, uSrc);
3597 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3599
3600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3602 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3603 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3604
3605 IEM_MC_PREPARE_SSE_USAGE();
3606 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3607 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3608
3609 IEM_MC_ADVANCE_RIP();
3610 IEM_MC_END();
3611 }
3612 return VINF_SUCCESS;
3613}
3614
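/* A note on the SSE2 worker above: IEM_MC_FETCH_MEM_U128_ALIGN_SSE is the
 * piece that enforces the "exceptions type 4" rule stated in the comment,
 * i.e. a 128-bit memory operand of a legacy SSE instruction that is not
 * 16-byte aligned raises #GP(0) instead of being fetched. */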
3615
3616/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3617FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3618{
3619 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3620 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3621}
3622
3623/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3624FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3625{
3626 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3627 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3628}
3629
3630/* Opcode 0xf3 0x0f 0x74 - invalid */
3631/* Opcode 0xf2 0x0f 0x74 - invalid */
3632
3633
3634/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3635FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3636{
3637 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3638 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3639}
3640
3641/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3642FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3643{
3644 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3645 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3646}
3647
3648/* Opcode 0xf3 0x0f 0x75 - invalid */
3649/* Opcode 0xf2 0x0f 0x75 - invalid */
3650
3651
3652/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3653FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3654{
3655 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3656 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3657}
3658
3659/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3660FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3661{
3662 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3663 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3664}
3665
3666/* Opcode 0xf3 0x0f 0x76 - invalid */
3667/* Opcode 0xf2 0x0f 0x76 - invalid */
3668
3669
3670/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3671FNIEMOP_STUB(iemOp_emms);
3672/* Opcode 0x66 0x0f 0x77 - invalid */
3673/* Opcode 0xf3 0x0f 0x77 - invalid */
3674/* Opcode 0xf2 0x0f 0x77 - invalid */
3675
3676/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3677FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3678/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3679FNIEMOP_STUB(iemOp_AmdGrp17);
3680/* Opcode 0xf3 0x0f 0x78 - invalid */
3681/* Opcode 0xf2 0x0f 0x78 - invalid */
3682
3683/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3684FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3685/* Opcode 0x66 0x0f 0x79 - invalid */
3686/* Opcode 0xf3 0x0f 0x79 - invalid */
3687/* Opcode 0xf2 0x0f 0x79 - invalid */
3688
3689/* Opcode 0x0f 0x7a - invalid */
3690/* Opcode 0x66 0x0f 0x7a - invalid */
3691/* Opcode 0xf3 0x0f 0x7a - invalid */
3692/* Opcode 0xf2 0x0f 0x7a - invalid */
3693
3694/* Opcode 0x0f 0x7b - invalid */
3695/* Opcode 0x66 0x0f 0x7b - invalid */
3696/* Opcode 0xf3 0x0f 0x7b - invalid */
3697/* Opcode 0xf2 0x0f 0x7b - invalid */
3698
3699/* Opcode 0x0f 0x7c - invalid */
3700/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3701FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3702/* Opcode 0xf3 0x0f 0x7c - invalid */
3703/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3704FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3705
3706/* Opcode 0x0f 0x7d - invalid */
3707/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3708FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3709/* Opcode 0xf3 0x0f 0x7d - invalid */
3710/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3711FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3712
3713
3714/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3715FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3716{
3717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3718 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3719 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3720 else
3721 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3722 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3723 {
3724 /* greg, MMX */
3725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3726 IEM_MC_BEGIN(0, 1);
3727 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3728 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3729 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3730 {
3731 IEM_MC_LOCAL(uint64_t, u64Tmp);
3732 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3733 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3734 }
3735 else
3736 {
3737 IEM_MC_LOCAL(uint32_t, u32Tmp);
3738 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3739 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3740 }
3741 IEM_MC_ADVANCE_RIP();
3742 IEM_MC_END();
3743 }
3744 else
3745 {
3746 /* [mem], MMX */
3747 IEM_MC_BEGIN(0, 2);
3748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3749 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3752 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3753 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3754 {
3755 IEM_MC_LOCAL(uint64_t, u64Tmp);
3756 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3757 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3758 }
3759 else
3760 {
3761 IEM_MC_LOCAL(uint32_t, u32Tmp);
3762 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3763 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3764 }
3765 IEM_MC_ADVANCE_RIP();
3766 IEM_MC_END();
3767 }
3768 return VINF_SUCCESS;
3769}
3770
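/* A worked encoding example for the handler above (illustration only):
 * 0x0f 0x7e 0xc8 is movd eax, mm1 (mod=3, the reg field selects mm1 and the
 * rm field selects eax), while prefixing it with REX.W, 0x48 0x0f 0x7e 0xc8,
 * turns it into movq rax, mm1 and takes the 64-bit path. */
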
3771/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3772FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3773{
3774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3775 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3776 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3777 else
3778 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3779 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3780 {
3781 /* greg, XMM */
3782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3783 IEM_MC_BEGIN(0, 1);
3784 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3785 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3786 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3787 {
3788 IEM_MC_LOCAL(uint64_t, u64Tmp);
3789 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3790 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3791 }
3792 else
3793 {
3794 IEM_MC_LOCAL(uint32_t, u32Tmp);
3795 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3796 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3797 }
3798 IEM_MC_ADVANCE_RIP();
3799 IEM_MC_END();
3800 }
3801 else
3802 {
3803 /* [mem], XMM */
3804 IEM_MC_BEGIN(0, 2);
3805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3806 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3809 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3810 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3811 {
3812 IEM_MC_LOCAL(uint64_t, u64Tmp);
3813 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3814 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3815 }
3816 else
3817 {
3818 IEM_MC_LOCAL(uint32_t, u32Tmp);
3819 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3820 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3821 }
3822 IEM_MC_ADVANCE_RIP();
3823 IEM_MC_END();
3824 }
3825 return VINF_SUCCESS;
3826}
3827
3828/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3829FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3830/* Opcode 0xf2 0x0f 0x7e - invalid */
3831
3832
3833/** Opcode 0x0f 0x7f - movq Qq, Pq */
3834FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3835{
3836 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3837 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3838 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3839 {
3840 /*
3841 * Register, register.
3842 */
3843 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3844 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3846 IEM_MC_BEGIN(0, 1);
3847 IEM_MC_LOCAL(uint64_t, u64Tmp);
3848 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3849 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3850 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3851 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3852 IEM_MC_ADVANCE_RIP();
3853 IEM_MC_END();
3854 }
3855 else
3856 {
3857 /*
3858 * Register, memory.
3859 */
3860 IEM_MC_BEGIN(0, 2);
3861 IEM_MC_LOCAL(uint64_t, u64Tmp);
3862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3863
3864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3866 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3867 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3868
3869 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3870 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3871
3872 IEM_MC_ADVANCE_RIP();
3873 IEM_MC_END();
3874 }
3875 return VINF_SUCCESS;
3876}
3877
3878/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
3879FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
3880{
3881 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
3882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3884 {
3885 /*
3886 * Register, register.
3887 */
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3889 IEM_MC_BEGIN(0, 0);
3890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3891 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3892 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3893 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3894 IEM_MC_ADVANCE_RIP();
3895 IEM_MC_END();
3896 }
3897 else
3898 {
3899 /*
3900 * Register, memory.
3901 */
3902 IEM_MC_BEGIN(0, 2);
3903 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3905
3906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3908 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3909 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3910
3911 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3912 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3913
3914 IEM_MC_ADVANCE_RIP();
3915 IEM_MC_END();
3916 }
3917 return VINF_SUCCESS;
3918}
3919
3920/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
3921FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
3922{
3923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3924 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
3925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3926 {
3927 /*
3928 * Register, register.
3929 */
3930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3931 IEM_MC_BEGIN(0, 0);
3932 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3933 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3934 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3935 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3936 IEM_MC_ADVANCE_RIP();
3937 IEM_MC_END();
3938 }
3939 else
3940 {
3941 /*
3942 * Register, memory.
3943 */
3944 IEM_MC_BEGIN(0, 2);
3945 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3947
3948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3950 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3951 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3952
3953 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3954 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3955
3956 IEM_MC_ADVANCE_RIP();
3957 IEM_MC_END();
3958 }
3959 return VINF_SUCCESS;
3960}
3961
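/* A note on the two stores above: they differ only in the memory path.
 * movdqa uses IEM_MC_STORE_MEM_U128_ALIGN_SSE, so a destination that is not
 * 16-byte aligned raises #GP(0), while movdqu uses IEM_MC_STORE_MEM_U128 and
 * accepts any alignment. The register-to-register forms are identical. */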
3962/* Opcode 0xf2 0x0f 0x7f - invalid */
3963
3964
3965
3966/** Opcode 0x0f 0x80. */
3967FNIEMOP_DEF(iemOp_jo_Jv)
3968{
3969 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3970 IEMOP_HLP_MIN_386();
3971 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3972 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3973 {
3974 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3976
3977 IEM_MC_BEGIN(0, 0);
3978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3979 IEM_MC_REL_JMP_S16(i16Imm);
3980 } IEM_MC_ELSE() {
3981 IEM_MC_ADVANCE_RIP();
3982 } IEM_MC_ENDIF();
3983 IEM_MC_END();
3984 }
3985 else
3986 {
3987 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3989
3990 IEM_MC_BEGIN(0, 0);
3991 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3992 IEM_MC_REL_JMP_S32(i32Imm);
3993 } IEM_MC_ELSE() {
3994 IEM_MC_ADVANCE_RIP();
3995 } IEM_MC_ENDIF();
3996 IEM_MC_END();
3997 }
3998 return VINF_SUCCESS;
3999}
4000
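/* A note on the Jcc handlers that follow: IEMOP_HLP_DEFAULT_64BIT_OP_SIZE
 * makes the operand size default to 64-bit in long mode, so the common case
 * decodes a rel32 immediate and IEM_MC_REL_JMP_S32 sign-extends it onto RIP.
 * The 16-bit path is taken in 16-bit code or with an operand-size prefix,
 * where IEM_MC_REL_JMP_S16 leaves only the low 16 bits of the instruction
 * pointer. */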
4001
4002/** Opcode 0x0f 0x81. */
4003FNIEMOP_DEF(iemOp_jno_Jv)
4004{
4005 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4006 IEMOP_HLP_MIN_386();
4007 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4008 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4009 {
4010 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4012
4013 IEM_MC_BEGIN(0, 0);
4014 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4015 IEM_MC_ADVANCE_RIP();
4016 } IEM_MC_ELSE() {
4017 IEM_MC_REL_JMP_S16(i16Imm);
4018 } IEM_MC_ENDIF();
4019 IEM_MC_END();
4020 }
4021 else
4022 {
4023 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4025
4026 IEM_MC_BEGIN(0, 0);
4027 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4028 IEM_MC_ADVANCE_RIP();
4029 } IEM_MC_ELSE() {
4030 IEM_MC_REL_JMP_S32(i32Imm);
4031 } IEM_MC_ENDIF();
4032 IEM_MC_END();
4033 }
4034 return VINF_SUCCESS;
4035}
4036
4037
4038/** Opcode 0x0f 0x82. */
4039FNIEMOP_DEF(iemOp_jc_Jv)
4040{
4041 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4042 IEMOP_HLP_MIN_386();
4043 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4044 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4045 {
4046 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4048
4049 IEM_MC_BEGIN(0, 0);
4050 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4051 IEM_MC_REL_JMP_S16(i16Imm);
4052 } IEM_MC_ELSE() {
4053 IEM_MC_ADVANCE_RIP();
4054 } IEM_MC_ENDIF();
4055 IEM_MC_END();
4056 }
4057 else
4058 {
4059 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4061
4062 IEM_MC_BEGIN(0, 0);
4063 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4064 IEM_MC_REL_JMP_S32(i32Imm);
4065 } IEM_MC_ELSE() {
4066 IEM_MC_ADVANCE_RIP();
4067 } IEM_MC_ENDIF();
4068 IEM_MC_END();
4069 }
4070 return VINF_SUCCESS;
4071}
4072
4073
4074/** Opcode 0x0f 0x83. */
4075FNIEMOP_DEF(iemOp_jnc_Jv)
4076{
4077 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4078 IEMOP_HLP_MIN_386();
4079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4080 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4081 {
4082 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4084
4085 IEM_MC_BEGIN(0, 0);
4086 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4087 IEM_MC_ADVANCE_RIP();
4088 } IEM_MC_ELSE() {
4089 IEM_MC_REL_JMP_S16(i16Imm);
4090 } IEM_MC_ENDIF();
4091 IEM_MC_END();
4092 }
4093 else
4094 {
4095 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4097
4098 IEM_MC_BEGIN(0, 0);
4099 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4100 IEM_MC_ADVANCE_RIP();
4101 } IEM_MC_ELSE() {
4102 IEM_MC_REL_JMP_S32(i32Imm);
4103 } IEM_MC_ENDIF();
4104 IEM_MC_END();
4105 }
4106 return VINF_SUCCESS;
4107}
4108
4109
4110/** Opcode 0x0f 0x84. */
4111FNIEMOP_DEF(iemOp_je_Jv)
4112{
4113 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4114 IEMOP_HLP_MIN_386();
4115 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4116 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4117 {
4118 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120
4121 IEM_MC_BEGIN(0, 0);
4122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4123 IEM_MC_REL_JMP_S16(i16Imm);
4124 } IEM_MC_ELSE() {
4125 IEM_MC_ADVANCE_RIP();
4126 } IEM_MC_ENDIF();
4127 IEM_MC_END();
4128 }
4129 else
4130 {
4131 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4133
4134 IEM_MC_BEGIN(0, 0);
4135 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4136 IEM_MC_REL_JMP_S32(i32Imm);
4137 } IEM_MC_ELSE() {
4138 IEM_MC_ADVANCE_RIP();
4139 } IEM_MC_ENDIF();
4140 IEM_MC_END();
4141 }
4142 return VINF_SUCCESS;
4143}
4144
4145
4146/** Opcode 0x0f 0x85. */
4147FNIEMOP_DEF(iemOp_jne_Jv)
4148{
4149 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4150 IEMOP_HLP_MIN_386();
4151 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4152 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4153 {
4154 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4156
4157 IEM_MC_BEGIN(0, 0);
4158 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4159 IEM_MC_ADVANCE_RIP();
4160 } IEM_MC_ELSE() {
4161 IEM_MC_REL_JMP_S16(i16Imm);
4162 } IEM_MC_ENDIF();
4163 IEM_MC_END();
4164 }
4165 else
4166 {
4167 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4169
4170 IEM_MC_BEGIN(0, 0);
4171 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4172 IEM_MC_ADVANCE_RIP();
4173 } IEM_MC_ELSE() {
4174 IEM_MC_REL_JMP_S32(i32Imm);
4175 } IEM_MC_ENDIF();
4176 IEM_MC_END();
4177 }
4178 return VINF_SUCCESS;
4179}
4180
4181
4182/** Opcode 0x0f 0x86. */
4183FNIEMOP_DEF(iemOp_jbe_Jv)
4184{
4185 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4186 IEMOP_HLP_MIN_386();
4187 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4188 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4189 {
4190 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192
4193 IEM_MC_BEGIN(0, 0);
4194 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4195 IEM_MC_REL_JMP_S16(i16Imm);
4196 } IEM_MC_ELSE() {
4197 IEM_MC_ADVANCE_RIP();
4198 } IEM_MC_ENDIF();
4199 IEM_MC_END();
4200 }
4201 else
4202 {
4203 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4205
4206 IEM_MC_BEGIN(0, 0);
4207 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4208 IEM_MC_REL_JMP_S32(i32Imm);
4209 } IEM_MC_ELSE() {
4210 IEM_MC_ADVANCE_RIP();
4211 } IEM_MC_ENDIF();
4212 IEM_MC_END();
4213 }
4214 return VINF_SUCCESS;
4215}
4216
4217
4218/** Opcode 0x0f 0x87. */
4219FNIEMOP_DEF(iemOp_jnbe_Jv)
4220{
4221 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4222 IEMOP_HLP_MIN_386();
4223 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4224 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4225 {
4226 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4228
4229 IEM_MC_BEGIN(0, 0);
4230 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4231 IEM_MC_ADVANCE_RIP();
4232 } IEM_MC_ELSE() {
4233 IEM_MC_REL_JMP_S16(i16Imm);
4234 } IEM_MC_ENDIF();
4235 IEM_MC_END();
4236 }
4237 else
4238 {
4239 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4241
4242 IEM_MC_BEGIN(0, 0);
4243 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4244 IEM_MC_ADVANCE_RIP();
4245 } IEM_MC_ELSE() {
4246 IEM_MC_REL_JMP_S32(i32Imm);
4247 } IEM_MC_ENDIF();
4248 IEM_MC_END();
4249 }
4250 return VINF_SUCCESS;
4251}
4252
4253
4254/** Opcode 0x0f 0x88. */
4255FNIEMOP_DEF(iemOp_js_Jv)
4256{
4257 IEMOP_MNEMONIC(js_Jv, "js Jv");
4258 IEMOP_HLP_MIN_386();
4259 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4260 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4261 {
4262 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264
4265 IEM_MC_BEGIN(0, 0);
4266 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4267 IEM_MC_REL_JMP_S16(i16Imm);
4268 } IEM_MC_ELSE() {
4269 IEM_MC_ADVANCE_RIP();
4270 } IEM_MC_ENDIF();
4271 IEM_MC_END();
4272 }
4273 else
4274 {
4275 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4277
4278 IEM_MC_BEGIN(0, 0);
4279 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4280 IEM_MC_REL_JMP_S32(i32Imm);
4281 } IEM_MC_ELSE() {
4282 IEM_MC_ADVANCE_RIP();
4283 } IEM_MC_ENDIF();
4284 IEM_MC_END();
4285 }
4286 return VINF_SUCCESS;
4287}
4288
4289
4290/** Opcode 0x0f 0x89. */
4291FNIEMOP_DEF(iemOp_jns_Jv)
4292{
4293 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4294 IEMOP_HLP_MIN_386();
4295 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4296 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4297 {
4298 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4300
4301 IEM_MC_BEGIN(0, 0);
4302 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4303 IEM_MC_ADVANCE_RIP();
4304 } IEM_MC_ELSE() {
4305 IEM_MC_REL_JMP_S16(i16Imm);
4306 } IEM_MC_ENDIF();
4307 IEM_MC_END();
4308 }
4309 else
4310 {
4311 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4313
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4316 IEM_MC_ADVANCE_RIP();
4317 } IEM_MC_ELSE() {
4318 IEM_MC_REL_JMP_S32(i32Imm);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_END();
4321 }
4322 return VINF_SUCCESS;
4323}
4324
4325
4326/** Opcode 0x0f 0x8a. */
4327FNIEMOP_DEF(iemOp_jp_Jv)
4328{
4329 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4330 IEMOP_HLP_MIN_386();
4331 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4332 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4333 {
4334 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4336
4337 IEM_MC_BEGIN(0, 0);
4338 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4339 IEM_MC_REL_JMP_S16(i16Imm);
4340 } IEM_MC_ELSE() {
4341 IEM_MC_ADVANCE_RIP();
4342 } IEM_MC_ENDIF();
4343 IEM_MC_END();
4344 }
4345 else
4346 {
4347 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4349
4350 IEM_MC_BEGIN(0, 0);
4351 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4352 IEM_MC_REL_JMP_S32(i32Imm);
4353 } IEM_MC_ELSE() {
4354 IEM_MC_ADVANCE_RIP();
4355 } IEM_MC_ENDIF();
4356 IEM_MC_END();
4357 }
4358 return VINF_SUCCESS;
4359}
4360
4361
4362/** Opcode 0x0f 0x8b. */
4363FNIEMOP_DEF(iemOp_jnp_Jv)
4364{
4365 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4366 IEMOP_HLP_MIN_386();
4367 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4368 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4369 {
4370 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372
4373 IEM_MC_BEGIN(0, 0);
4374 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4375 IEM_MC_ADVANCE_RIP();
4376 } IEM_MC_ELSE() {
4377 IEM_MC_REL_JMP_S16(i16Imm);
4378 } IEM_MC_ENDIF();
4379 IEM_MC_END();
4380 }
4381 else
4382 {
4383 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4385
4386 IEM_MC_BEGIN(0, 0);
4387 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4388 IEM_MC_ADVANCE_RIP();
4389 } IEM_MC_ELSE() {
4390 IEM_MC_REL_JMP_S32(i32Imm);
4391 } IEM_MC_ENDIF();
4392 IEM_MC_END();
4393 }
4394 return VINF_SUCCESS;
4395}
4396
4397
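/* The signed-comparison conditions used by the next four handlers follow
 * from how subtraction sets the flags: less is SF != OF, greater-or-equal is
 * SF == OF, less-or-equal is ZF || SF != OF, and greater is the negation of
 * that. Worked example: the 8-bit compare -128 - 1 produces 0x7f with SF=0
 * and OF=1, so SF != OF and jl is taken, matching -128 < 1. */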
4398/** Opcode 0x0f 0x8c. */
4399FNIEMOP_DEF(iemOp_jl_Jv)
4400{
4401 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4402 IEMOP_HLP_MIN_386();
4403 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4404 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4405 {
4406 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4408
4409 IEM_MC_BEGIN(0, 0);
4410 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4411 IEM_MC_REL_JMP_S16(i16Imm);
4412 } IEM_MC_ELSE() {
4413 IEM_MC_ADVANCE_RIP();
4414 } IEM_MC_ENDIF();
4415 IEM_MC_END();
4416 }
4417 else
4418 {
4419 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4421
4422 IEM_MC_BEGIN(0, 0);
4423 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4424 IEM_MC_REL_JMP_S32(i32Imm);
4425 } IEM_MC_ELSE() {
4426 IEM_MC_ADVANCE_RIP();
4427 } IEM_MC_ENDIF();
4428 IEM_MC_END();
4429 }
4430 return VINF_SUCCESS;
4431}
4432
4433
4434/** Opcode 0x0f 0x8d. */
4435FNIEMOP_DEF(iemOp_jnl_Jv)
4436{
4437 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4438 IEMOP_HLP_MIN_386();
4439 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4440 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4441 {
4442 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4444
4445 IEM_MC_BEGIN(0, 0);
4446 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4447 IEM_MC_ADVANCE_RIP();
4448 } IEM_MC_ELSE() {
4449 IEM_MC_REL_JMP_S16(i16Imm);
4450 } IEM_MC_ENDIF();
4451 IEM_MC_END();
4452 }
4453 else
4454 {
4455 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4457
4458 IEM_MC_BEGIN(0, 0);
4459 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4460 IEM_MC_ADVANCE_RIP();
4461 } IEM_MC_ELSE() {
4462 IEM_MC_REL_JMP_S32(i32Imm);
4463 } IEM_MC_ENDIF();
4464 IEM_MC_END();
4465 }
4466 return VINF_SUCCESS;
4467}
4468
4469
4470/** Opcode 0x0f 0x8e. */
4471FNIEMOP_DEF(iemOp_jle_Jv)
4472{
4473 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4474 IEMOP_HLP_MIN_386();
4475 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4476 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4477 {
4478 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4480
4481 IEM_MC_BEGIN(0, 0);
4482 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4483 IEM_MC_REL_JMP_S16(i16Imm);
4484 } IEM_MC_ELSE() {
4485 IEM_MC_ADVANCE_RIP();
4486 } IEM_MC_ENDIF();
4487 IEM_MC_END();
4488 }
4489 else
4490 {
4491 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4493
4494 IEM_MC_BEGIN(0, 0);
4495 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4496 IEM_MC_REL_JMP_S32(i32Imm);
4497 } IEM_MC_ELSE() {
4498 IEM_MC_ADVANCE_RIP();
4499 } IEM_MC_ENDIF();
4500 IEM_MC_END();
4501 }
4502 return VINF_SUCCESS;
4503}
4504
4505
4506/** Opcode 0x0f 0x8f. */
4507FNIEMOP_DEF(iemOp_jnle_Jv)
4508{
4509 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4510 IEMOP_HLP_MIN_386();
4511 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4512 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4513 {
4514 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4516
4517 IEM_MC_BEGIN(0, 0);
4518 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4519 IEM_MC_ADVANCE_RIP();
4520 } IEM_MC_ELSE() {
4521 IEM_MC_REL_JMP_S16(i16Imm);
4522 } IEM_MC_ENDIF();
4523 IEM_MC_END();
4524 }
4525 else
4526 {
4527 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4529
4530 IEM_MC_BEGIN(0, 0);
4531 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4532 IEM_MC_ADVANCE_RIP();
4533 } IEM_MC_ELSE() {
4534 IEM_MC_REL_JMP_S32(i32Imm);
4535 } IEM_MC_ENDIF();
4536 IEM_MC_END();
4537 }
4538 return VINF_SUCCESS;
4539}
4540
4541
4542/** Opcode 0x0f 0x90. */
4543FNIEMOP_DEF(iemOp_seto_Eb)
4544{
4545 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4546 IEMOP_HLP_MIN_386();
4547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4548
4549 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4550 * any way. AMD says it's "unused", whatever that means. We're
4551 * ignoring for now. */
4552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4553 {
4554 /* register target */
4555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4556 IEM_MC_BEGIN(0, 0);
4557 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4558 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4559 } IEM_MC_ELSE() {
4560 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4561 } IEM_MC_ENDIF();
4562 IEM_MC_ADVANCE_RIP();
4563 IEM_MC_END();
4564 }
4565 else
4566 {
4567 /* memory target */
4568 IEM_MC_BEGIN(0, 1);
4569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4572 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4573 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4574 } IEM_MC_ELSE() {
4575 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4576 } IEM_MC_ENDIF();
4577 IEM_MC_ADVANCE_RIP();
4578 IEM_MC_END();
4579 }
4580 return VINF_SUCCESS;
4581}
4582
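/* A worked encoding example for the setcc family that starts above
 * (illustration only): 0x0f 0x90 0xc0 is seto al (mod=3, rm=0 selects al;
 * the reg field is ignored here, which is what the encoding-test todos are
 * about). The handlers always store a full byte, 1 or 0; they never leave
 * the destination untouched. */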
4583
4584/** Opcode 0x0f 0x91. */
4585FNIEMOP_DEF(iemOp_setno_Eb)
4586{
4587 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4588 IEMOP_HLP_MIN_386();
4589 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4590
4591 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4592 * any way. AMD says it's "unused", whatever that means. We're
4593 * ignoring for now. */
4594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4595 {
4596 /* register target */
4597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4598 IEM_MC_BEGIN(0, 0);
4599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4600 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4601 } IEM_MC_ELSE() {
4602 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4603 } IEM_MC_ENDIF();
4604 IEM_MC_ADVANCE_RIP();
4605 IEM_MC_END();
4606 }
4607 else
4608 {
4609 /* memory target */
4610 IEM_MC_BEGIN(0, 1);
4611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4615 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4616 } IEM_MC_ELSE() {
4617 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4618 } IEM_MC_ENDIF();
4619 IEM_MC_ADVANCE_RIP();
4620 IEM_MC_END();
4621 }
4622 return VINF_SUCCESS;
4623}
4624
4625
4626/** Opcode 0x0f 0x92. */
4627FNIEMOP_DEF(iemOp_setc_Eb)
4628{
4629 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4630 IEMOP_HLP_MIN_386();
4631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4632
4633 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4634 * any way. AMD says it's "unused", whatever that means. We're
4635 * ignoring for now. */
4636 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4637 {
4638 /* register target */
4639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4640 IEM_MC_BEGIN(0, 0);
4641 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4642 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4643 } IEM_MC_ELSE() {
4644 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4645 } IEM_MC_ENDIF();
4646 IEM_MC_ADVANCE_RIP();
4647 IEM_MC_END();
4648 }
4649 else
4650 {
4651 /* memory target */
4652 IEM_MC_BEGIN(0, 1);
4653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4658 } IEM_MC_ELSE() {
4659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4660 } IEM_MC_ENDIF();
4661 IEM_MC_ADVANCE_RIP();
4662 IEM_MC_END();
4663 }
4664 return VINF_SUCCESS;
4665}
4666
4667
4668/** Opcode 0x0f 0x93. */
4669FNIEMOP_DEF(iemOp_setnc_Eb)
4670{
4671 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4672 IEMOP_HLP_MIN_386();
4673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4674
4675 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4676 * any way. AMD says it's "unused", whatever that means. We're
4677 * ignoring for now. */
4678 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4679 {
4680 /* register target */
4681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4682 IEM_MC_BEGIN(0, 0);
4683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4684 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4685 } IEM_MC_ELSE() {
4686 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4687 } IEM_MC_ENDIF();
4688 IEM_MC_ADVANCE_RIP();
4689 IEM_MC_END();
4690 }
4691 else
4692 {
4693 /* memory target */
4694 IEM_MC_BEGIN(0, 1);
4695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4699 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4700 } IEM_MC_ELSE() {
4701 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4702 } IEM_MC_ENDIF();
4703 IEM_MC_ADVANCE_RIP();
4704 IEM_MC_END();
4705 }
4706 return VINF_SUCCESS;
4707}
4708
4709
4710/** Opcode 0x0f 0x94. */
4711FNIEMOP_DEF(iemOp_sete_Eb)
4712{
4713 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4714 IEMOP_HLP_MIN_386();
4715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4716
4717 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4718 * any way. AMD says it's "unused", whatever that means. We're
4719 * ignoring for now. */
4720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4721 {
4722 /* register target */
4723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4724 IEM_MC_BEGIN(0, 0);
4725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4726 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4727 } IEM_MC_ELSE() {
4728 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4729 } IEM_MC_ENDIF();
4730 IEM_MC_ADVANCE_RIP();
4731 IEM_MC_END();
4732 }
4733 else
4734 {
4735 /* memory target */
4736 IEM_MC_BEGIN(0, 1);
4737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4742 } IEM_MC_ELSE() {
4743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4744 } IEM_MC_ENDIF();
4745 IEM_MC_ADVANCE_RIP();
4746 IEM_MC_END();
4747 }
4748 return VINF_SUCCESS;
4749}
4750
4751
4752/** Opcode 0x0f 0x95. */
4753FNIEMOP_DEF(iemOp_setne_Eb)
4754{
4755 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4756 IEMOP_HLP_MIN_386();
4757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4758
4759 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4760 * any way. AMD says it's "unused", whatever that means. We're
4761 * ignoring for now. */
4762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4763 {
4764 /* register target */
4765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4766 IEM_MC_BEGIN(0, 0);
4767 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4768 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4769 } IEM_MC_ELSE() {
4770 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4771 } IEM_MC_ENDIF();
4772 IEM_MC_ADVANCE_RIP();
4773 IEM_MC_END();
4774 }
4775 else
4776 {
4777 /* memory target */
4778 IEM_MC_BEGIN(0, 1);
4779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4783 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4784 } IEM_MC_ELSE() {
4785 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4786 } IEM_MC_ENDIF();
4787 IEM_MC_ADVANCE_RIP();
4788 IEM_MC_END();
4789 }
4790 return VINF_SUCCESS;
4791}
4792
4793
4794/** Opcode 0x0f 0x96. */
4795FNIEMOP_DEF(iemOp_setbe_Eb)
4796{
4797 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4798 IEMOP_HLP_MIN_386();
4799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4800
4801 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4802 * any way. AMD says it's "unused", whatever that means. We're
4803 * ignoring for now. */
4804 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4805 {
4806 /* register target */
4807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4808 IEM_MC_BEGIN(0, 0);
4809 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4810 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4811 } IEM_MC_ELSE() {
4812 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4813 } IEM_MC_ENDIF();
4814 IEM_MC_ADVANCE_RIP();
4815 IEM_MC_END();
4816 }
4817 else
4818 {
4819 /* memory target */
4820 IEM_MC_BEGIN(0, 1);
4821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4824 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4825 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4826 } IEM_MC_ELSE() {
4827 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4828 } IEM_MC_ENDIF();
4829 IEM_MC_ADVANCE_RIP();
4830 IEM_MC_END();
4831 }
4832 return VINF_SUCCESS;
4833}
4834
4835
4836/** Opcode 0x0f 0x97. */
4837FNIEMOP_DEF(iemOp_setnbe_Eb)
4838{
4839 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4840 IEMOP_HLP_MIN_386();
4841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4842
4843 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4844 * any way. AMD says it's "unused", whatever that means. We're
4845 * ignoring for now. */
4846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4847 {
4848 /* register target */
4849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4850 IEM_MC_BEGIN(0, 0);
4851 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4852 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4853 } IEM_MC_ELSE() {
4854 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4855 } IEM_MC_ENDIF();
4856 IEM_MC_ADVANCE_RIP();
4857 IEM_MC_END();
4858 }
4859 else
4860 {
4861 /* memory target */
4862 IEM_MC_BEGIN(0, 1);
4863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4866 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4867 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4868 } IEM_MC_ELSE() {
4869 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4870 } IEM_MC_ENDIF();
4871 IEM_MC_ADVANCE_RIP();
4872 IEM_MC_END();
4873 }
4874 return VINF_SUCCESS;
4875}
4876
4877
4878/** Opcode 0x0f 0x98. */
4879FNIEMOP_DEF(iemOp_sets_Eb)
4880{
4881 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4882 IEMOP_HLP_MIN_386();
4883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4884
4885 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4886 * any way. AMD says it's "unused", whatever that means. We're
4887 * ignoring for now. */
4888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4889 {
4890 /* register target */
4891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4892 IEM_MC_BEGIN(0, 0);
4893 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4894 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4895 } IEM_MC_ELSE() {
4896 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4897 } IEM_MC_ENDIF();
4898 IEM_MC_ADVANCE_RIP();
4899 IEM_MC_END();
4900 }
4901 else
4902 {
4903 /* memory target */
4904 IEM_MC_BEGIN(0, 1);
4905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4908 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4909 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4910 } IEM_MC_ELSE() {
4911 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4912 } IEM_MC_ENDIF();
4913 IEM_MC_ADVANCE_RIP();
4914 IEM_MC_END();
4915 }
4916 return VINF_SUCCESS;
4917}
4918
4919
4920/** Opcode 0x0f 0x99. */
4921FNIEMOP_DEF(iemOp_setns_Eb)
4922{
4923 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4924 IEMOP_HLP_MIN_386();
4925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4926
4927 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4928 * any way. AMD says it's "unused", whatever that means. We're
4929 * ignoring for now. */
4930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4931 {
4932 /* register target */
4933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4934 IEM_MC_BEGIN(0, 0);
4935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4936 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4937 } IEM_MC_ELSE() {
4938 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4939 } IEM_MC_ENDIF();
4940 IEM_MC_ADVANCE_RIP();
4941 IEM_MC_END();
4942 }
4943 else
4944 {
4945 /* memory target */
4946 IEM_MC_BEGIN(0, 1);
4947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4950 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4951 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4952 } IEM_MC_ELSE() {
4953 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4954 } IEM_MC_ENDIF();
4955 IEM_MC_ADVANCE_RIP();
4956 IEM_MC_END();
4957 }
4958 return VINF_SUCCESS;
4959}
4960
4961
4962/** Opcode 0x0f 0x9a. */
4963FNIEMOP_DEF(iemOp_setp_Eb)
4964{
4965 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4966 IEMOP_HLP_MIN_386();
4967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4968
4969 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4970 * any way. AMD says it's "unused", whatever that means. We're
4971 * ignoring for now. */
4972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4973 {
4974 /* register target */
4975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4976 IEM_MC_BEGIN(0, 0);
4977 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4978 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4979 } IEM_MC_ELSE() {
4980 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4981 } IEM_MC_ENDIF();
4982 IEM_MC_ADVANCE_RIP();
4983 IEM_MC_END();
4984 }
4985 else
4986 {
4987 /* memory target */
4988 IEM_MC_BEGIN(0, 1);
4989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4992 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4993 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4994 } IEM_MC_ELSE() {
4995 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4996 } IEM_MC_ENDIF();
4997 IEM_MC_ADVANCE_RIP();
4998 IEM_MC_END();
4999 }
5000 return VINF_SUCCESS;
5001}
5002
5003
5004/** Opcode 0x0f 0x9b. */
5005FNIEMOP_DEF(iemOp_setnp_Eb)
5006{
5007 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5008 IEMOP_HLP_MIN_386();
5009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5010
5011 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5012 * any way. AMD says it's "unused", whatever that means. We're
5013 * ignoring for now. */
5014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5015 {
5016 /* register target */
5017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5018 IEM_MC_BEGIN(0, 0);
5019 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5020 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5021 } IEM_MC_ELSE() {
5022 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5023 } IEM_MC_ENDIF();
5024 IEM_MC_ADVANCE_RIP();
5025 IEM_MC_END();
5026 }
5027 else
5028 {
5029 /* memory target */
5030 IEM_MC_BEGIN(0, 1);
5031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5034 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5035 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5036 } IEM_MC_ELSE() {
5037 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5038 } IEM_MC_ENDIF();
5039 IEM_MC_ADVANCE_RIP();
5040 IEM_MC_END();
5041 }
5042 return VINF_SUCCESS;
5043}
5044
5045
5046/** Opcode 0x0f 0x9c. */
5047FNIEMOP_DEF(iemOp_setl_Eb)
5048{
5049 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5050 IEMOP_HLP_MIN_386();
5051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5052
5053 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5054 * any way. AMD says it's "unused", whatever that means. We're
5055 * ignoring for now. */
5056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5057 {
5058 /* register target */
5059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5060 IEM_MC_BEGIN(0, 0);
5061 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5062 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5063 } IEM_MC_ELSE() {
5064 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5065 } IEM_MC_ENDIF();
5066 IEM_MC_ADVANCE_RIP();
5067 IEM_MC_END();
5068 }
5069 else
5070 {
5071 /* memory target */
5072 IEM_MC_BEGIN(0, 1);
5073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5076 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5077 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5078 } IEM_MC_ELSE() {
5079 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5080 } IEM_MC_ENDIF();
5081 IEM_MC_ADVANCE_RIP();
5082 IEM_MC_END();
5083 }
5084 return VINF_SUCCESS;
5085}
5086
5087
5088/** Opcode 0x0f 0x9d. */
5089FNIEMOP_DEF(iemOp_setnl_Eb)
5090{
5091 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5092 IEMOP_HLP_MIN_386();
5093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5094
5095 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5096 * any way. AMD says it's "unused", whatever that means. We're
5097 * ignoring for now. */
5098 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5099 {
5100 /* register target */
5101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5102 IEM_MC_BEGIN(0, 0);
5103 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5104 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5105 } IEM_MC_ELSE() {
5106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5107 } IEM_MC_ENDIF();
5108 IEM_MC_ADVANCE_RIP();
5109 IEM_MC_END();
5110 }
5111 else
5112 {
5113 /* memory target */
5114 IEM_MC_BEGIN(0, 1);
5115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5118 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5119 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5120 } IEM_MC_ELSE() {
5121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5122 } IEM_MC_ENDIF();
5123 IEM_MC_ADVANCE_RIP();
5124 IEM_MC_END();
5125 }
5126 return VINF_SUCCESS;
5127}
5128
5129
5130/** Opcode 0x0f 0x9e. */
5131FNIEMOP_DEF(iemOp_setle_Eb)
5132{
5133 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5134 IEMOP_HLP_MIN_386();
5135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5136
5137 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5138 * any way. AMD says it's "unused", whatever that means. We're
5139 * ignoring for now. */
5140 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5141 {
5142 /* register target */
5143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5144 IEM_MC_BEGIN(0, 0);
5145 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5146 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5147 } IEM_MC_ELSE() {
5148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5149 } IEM_MC_ENDIF();
5150 IEM_MC_ADVANCE_RIP();
5151 IEM_MC_END();
5152 }
5153 else
5154 {
5155 /* memory target */
5156 IEM_MC_BEGIN(0, 1);
5157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5160 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5161 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5162 } IEM_MC_ELSE() {
5163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5164 } IEM_MC_ENDIF();
5165 IEM_MC_ADVANCE_RIP();
5166 IEM_MC_END();
5167 }
5168 return VINF_SUCCESS;
5169}
5170
5171
5172/** Opcode 0x0f 0x9f. */
5173FNIEMOP_DEF(iemOp_setnle_Eb)
5174{
5175 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5176 IEMOP_HLP_MIN_386();
5177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5178
5179 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5180 * any way. AMD says it's "unused", whatever that means. We're
5181 * ignoring for now. */
5182 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5183 {
5184 /* register target */
5185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5186 IEM_MC_BEGIN(0, 0);
5187 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5188 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5189 } IEM_MC_ELSE() {
5190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5191 } IEM_MC_ENDIF();
5192 IEM_MC_ADVANCE_RIP();
5193 IEM_MC_END();
5194 }
5195 else
5196 {
5197 /* memory target */
5198 IEM_MC_BEGIN(0, 1);
5199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5202 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5203 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5204 } IEM_MC_ELSE() {
5205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5206 } IEM_MC_ENDIF();
5207 IEM_MC_ADVANCE_RIP();
5208 IEM_MC_END();
5209 }
5210 return VINF_SUCCESS;
5211}
5212
5213
5214/**
5215 * Common 'push segment-register' helper.
5216 */
5217FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5218{
5219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5220 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5221 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5222
5223 switch (pVCpu->iem.s.enmEffOpSize)
5224 {
5225 case IEMMODE_16BIT:
5226 IEM_MC_BEGIN(0, 1);
5227 IEM_MC_LOCAL(uint16_t, u16Value);
5228 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5229 IEM_MC_PUSH_U16(u16Value);
5230 IEM_MC_ADVANCE_RIP();
5231 IEM_MC_END();
5232 break;
5233
5234 case IEMMODE_32BIT:
5235 IEM_MC_BEGIN(0, 1);
5236 IEM_MC_LOCAL(uint32_t, u32Value);
5237 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5238 IEM_MC_PUSH_U32_SREG(u32Value);
5239 IEM_MC_ADVANCE_RIP();
5240 IEM_MC_END();
5241 break;
5242
5243 case IEMMODE_64BIT:
5244 IEM_MC_BEGIN(0, 1);
5245 IEM_MC_LOCAL(uint64_t, u64Value);
5246 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5247 IEM_MC_PUSH_U64(u64Value);
5248 IEM_MC_ADVANCE_RIP();
5249 IEM_MC_END();
5250 break;
5251 }
5252
5253 return VINF_SUCCESS;
5254}
5255
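/* A note on the helper above: the 32-bit case goes through
 * IEM_MC_PUSH_U32_SREG rather than a plain 32-bit push because real CPUs
 * are documented to write only the low 16 bits of the stack slot for a
 * doubleword segment-register push, leaving the high word unmodified; the
 * dedicated micro-op lets the implementation reproduce that. */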
5256
5257/** Opcode 0x0f 0xa0. */
5258FNIEMOP_DEF(iemOp_push_fs)
5259{
5260 IEMOP_MNEMONIC(push_fs, "push fs");
5261 IEMOP_HLP_MIN_386();
5262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5263 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5264}
5265
5266
5267/** Opcode 0x0f 0xa1. */
5268FNIEMOP_DEF(iemOp_pop_fs)
5269{
5270 IEMOP_MNEMONIC(pop_fs, "pop fs");
5271 IEMOP_HLP_MIN_386();
5272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5273 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5274}
5275
5276
5277/** Opcode 0x0f 0xa2. */
5278FNIEMOP_DEF(iemOp_cpuid)
5279{
5280 IEMOP_MNEMONIC(cpuid, "cpuid");
5281 IEMOP_HLP_MIN_486(); /* not all 486es. */
5282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5283 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5284}
5285
5286
5287/**
5288 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5289 * iemOp_bts_Ev_Gv.
5290 */
5291FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5292{
5293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5294 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5295
5296 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5297 {
5298 /* register destination. */
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300 switch (pVCpu->iem.s.enmEffOpSize)
5301 {
5302 case IEMMODE_16BIT:
5303 IEM_MC_BEGIN(3, 0);
5304 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5305 IEM_MC_ARG(uint16_t, u16Src, 1);
5306 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5307
5308 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5309 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5310 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5311 IEM_MC_REF_EFLAGS(pEFlags);
5312 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5313
5314 IEM_MC_ADVANCE_RIP();
5315 IEM_MC_END();
5316 return VINF_SUCCESS;
5317
5318 case IEMMODE_32BIT:
5319 IEM_MC_BEGIN(3, 0);
5320 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5321 IEM_MC_ARG(uint32_t, u32Src, 1);
5322 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5323
5324 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5325 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5326 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5327 IEM_MC_REF_EFLAGS(pEFlags);
5328 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5329
5330 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5331 IEM_MC_ADVANCE_RIP();
5332 IEM_MC_END();
5333 return VINF_SUCCESS;
5334
5335 case IEMMODE_64BIT:
5336 IEM_MC_BEGIN(3, 0);
5337 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5338 IEM_MC_ARG(uint64_t, u64Src, 1);
5339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5340
5341 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5342 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5343 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5344 IEM_MC_REF_EFLAGS(pEFlags);
5345 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5346
5347 IEM_MC_ADVANCE_RIP();
5348 IEM_MC_END();
5349 return VINF_SUCCESS;
5350
5351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5352 }
5353 }
5354 else
5355 {
5356 /* memory destination. */
5357
5358 uint32_t fAccess;
5359 if (pImpl->pfnLockedU16)
5360 fAccess = IEM_ACCESS_DATA_RW;
5361 else /* BT */
5362 fAccess = IEM_ACCESS_DATA_R;
5363
5364 /** @todo test negative bit offsets! */
5365 switch (pVCpu->iem.s.enmEffOpSize)
5366 {
5367 case IEMMODE_16BIT:
5368 IEM_MC_BEGIN(3, 2);
5369 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5370 IEM_MC_ARG(uint16_t, u16Src, 1);
5371 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5373 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5374
5375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5376 if (pImpl->pfnLockedU16)
5377 IEMOP_HLP_DONE_DECODING();
5378 else
5379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5380 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5381 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5382 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5383 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5384 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5385 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5386 IEM_MC_FETCH_EFLAGS(EFlags);
5387
5388 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5389 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5390 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5391 else
5392 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5393 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5394
5395 IEM_MC_COMMIT_EFLAGS(EFlags);
5396 IEM_MC_ADVANCE_RIP();
5397 IEM_MC_END();
5398 return VINF_SUCCESS;
5399
5400 case IEMMODE_32BIT:
5401 IEM_MC_BEGIN(3, 2);
5402 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5403 IEM_MC_ARG(uint32_t, u32Src, 1);
5404 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5406 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5407
5408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5409 if (pImpl->pfnLockedU16)
5410 IEMOP_HLP_DONE_DECODING();
5411 else
5412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5413 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5414 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5415 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5416 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5417 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5418 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5419 IEM_MC_FETCH_EFLAGS(EFlags);
5420
5421 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5422 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5424 else
5425 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5426 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5427
5428 IEM_MC_COMMIT_EFLAGS(EFlags);
5429 IEM_MC_ADVANCE_RIP();
5430 IEM_MC_END();
5431 return VINF_SUCCESS;
5432
5433 case IEMMODE_64BIT:
5434 IEM_MC_BEGIN(3, 2);
5435 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5436 IEM_MC_ARG(uint64_t, u64Src, 1);
5437 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5439 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5440
5441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5442 if (pImpl->pfnLockedU16)
5443 IEMOP_HLP_DONE_DECODING();
5444 else
5445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5446 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5447 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5448 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5449 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5450 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5451 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5452 IEM_MC_FETCH_EFLAGS(EFlags);
5453
5454 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5455 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5456 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5457 else
5458 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5459 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5460
5461 IEM_MC_COMMIT_EFLAGS(EFlags);
5462 IEM_MC_ADVANCE_RIP();
5463 IEM_MC_END();
5464 return VINF_SUCCESS;
5465
5466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5467 }
5468 }
5469}
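
/*
 * The SAR/SHL pair in the memory paths above implements the bit-string
 * addressing of BT/BTS/BTR/BTC with a register bit offset: the signed offset
 * selects both the word that is accessed and the bit inside it. A minimal C
 * sketch of the same math (hypothetical helper, 16-bit operand size; assumes
 * arithmetic right shift for signed types so negative offsets address words
 * below the base, which is what the @todo note above wants tested):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static void bitOpEffAddrU16(int16_t i16BitOffset, uint64_t *puGCPtrEff, uint16_t *pu16BitMask)
{
    int16_t i16AddrAdj = i16BitOffset;
    i16AddrAdj >>= 4;                   /* word index: offset div 16 (SAR) */
    i16AddrAdj <<= 1;                   /* ... times two bytes per word (SHL) */
    *puGCPtrEff  += (int64_t)i16AddrAdj;
    *pu16BitMask  = (uint16_t)(1u << (i16BitOffset & 0x0f)); /* bit within the word */
}
#endif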
5470
5471
5472/** Opcode 0x0f 0xa3. */
5473FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5474{
5475 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5476 IEMOP_HLP_MIN_386();
5477 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5478}
5479
5480
5481/**
5482 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5483 */
5484FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5485{
5486 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5487 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5488
5489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5490 {
5491 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5493
5494 switch (pVCpu->iem.s.enmEffOpSize)
5495 {
5496 case IEMMODE_16BIT:
5497 IEM_MC_BEGIN(4, 0);
5498 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5499 IEM_MC_ARG(uint16_t, u16Src, 1);
5500 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5501 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5502
5503 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5504 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5505 IEM_MC_REF_EFLAGS(pEFlags);
5506 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5507
5508 IEM_MC_ADVANCE_RIP();
5509 IEM_MC_END();
5510 return VINF_SUCCESS;
5511
5512 case IEMMODE_32BIT:
5513 IEM_MC_BEGIN(4, 0);
5514 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5515 IEM_MC_ARG(uint32_t, u32Src, 1);
5516 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5517 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5518
5519 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5520 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5521 IEM_MC_REF_EFLAGS(pEFlags);
5522 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5523
5524 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5525 IEM_MC_ADVANCE_RIP();
5526 IEM_MC_END();
5527 return VINF_SUCCESS;
5528
5529 case IEMMODE_64BIT:
5530 IEM_MC_BEGIN(4, 0);
5531 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5532 IEM_MC_ARG(uint64_t, u64Src, 1);
5533 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5534 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5535
5536 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5537 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5538 IEM_MC_REF_EFLAGS(pEFlags);
5539 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5540
5541 IEM_MC_ADVANCE_RIP();
5542 IEM_MC_END();
5543 return VINF_SUCCESS;
5544
5545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5546 }
5547 }
5548 else
5549 {
5550 switch (pVCpu->iem.s.enmEffOpSize)
5551 {
5552 case IEMMODE_16BIT:
5553 IEM_MC_BEGIN(4, 2);
5554 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5555 IEM_MC_ARG(uint16_t, u16Src, 1);
5556 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5557 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5559
5560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5561 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5562 IEM_MC_ASSIGN(cShiftArg, cShift);
5563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5564 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5565 IEM_MC_FETCH_EFLAGS(EFlags);
5566 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5567 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5568
5569 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5570 IEM_MC_COMMIT_EFLAGS(EFlags);
5571 IEM_MC_ADVANCE_RIP();
5572 IEM_MC_END();
5573 return VINF_SUCCESS;
5574
5575 case IEMMODE_32BIT:
5576 IEM_MC_BEGIN(4, 2);
5577 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5578 IEM_MC_ARG(uint32_t, u32Src, 1);
5579 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5580 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5582
5583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5584 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5585 IEM_MC_ASSIGN(cShiftArg, cShift);
5586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5587 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5588 IEM_MC_FETCH_EFLAGS(EFlags);
5589 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5590 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5591
5592 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5593 IEM_MC_COMMIT_EFLAGS(EFlags);
5594 IEM_MC_ADVANCE_RIP();
5595 IEM_MC_END();
5596 return VINF_SUCCESS;
5597
5598 case IEMMODE_64BIT:
5599 IEM_MC_BEGIN(4, 2);
5600 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5601 IEM_MC_ARG(uint64_t, u64Src, 1);
5602 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5603 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5605
5606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5607 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5608 IEM_MC_ASSIGN(cShiftArg, cShift);
5609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5610 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5611 IEM_MC_FETCH_EFLAGS(EFlags);
5612 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5613 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5614
5615 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5616 IEM_MC_COMMIT_EFLAGS(EFlags);
5617 IEM_MC_ADVANCE_RIP();
5618 IEM_MC_END();
5619 return VINF_SUCCESS;
5620
5621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5622 }
5623 }
5624}
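
/*
 * The double-precision shift itself lives in the pImpl workers; for
 * reference, a rough C model of 32-bit SHLD (hypothetical helper, count
 * assumed pre-masked to 0..31; a count of zero leaves the destination and
 * flags untouched, and CF/OF handling is omitted here):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static uint32_t shldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;
    if (cShift)
        uDst = (uDst << cShift) | (uSrc >> (32 - cShift)); /* src bits enter from the right */
    return uDst;
}
#endif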
5625
5626
5627/**
5628 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5629 */
5630FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5631{
5632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5633 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5634
5635 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5636 {
5637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5638
5639 switch (pVCpu->iem.s.enmEffOpSize)
5640 {
5641 case IEMMODE_16BIT:
5642 IEM_MC_BEGIN(4, 0);
5643 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5644 IEM_MC_ARG(uint16_t, u16Src, 1);
5645 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5646 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5647
5648 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5649 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5650 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5651 IEM_MC_REF_EFLAGS(pEFlags);
5652 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5653
5654 IEM_MC_ADVANCE_RIP();
5655 IEM_MC_END();
5656 return VINF_SUCCESS;
5657
5658 case IEMMODE_32BIT:
5659 IEM_MC_BEGIN(4, 0);
5660 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5661 IEM_MC_ARG(uint32_t, u32Src, 1);
5662 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5663 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5664
5665 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5666 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5667 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5668 IEM_MC_REF_EFLAGS(pEFlags);
5669 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5670
5671 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5672 IEM_MC_ADVANCE_RIP();
5673 IEM_MC_END();
5674 return VINF_SUCCESS;
5675
5676 case IEMMODE_64BIT:
5677 IEM_MC_BEGIN(4, 0);
5678 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5679 IEM_MC_ARG(uint64_t, u64Src, 1);
5680 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5681 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5682
5683 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5684 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5685 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5686 IEM_MC_REF_EFLAGS(pEFlags);
5687 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5688
5689 IEM_MC_ADVANCE_RIP();
5690 IEM_MC_END();
5691 return VINF_SUCCESS;
5692
5693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5694 }
5695 }
5696 else
5697 {
5698 switch (pVCpu->iem.s.enmEffOpSize)
5699 {
5700 case IEMMODE_16BIT:
5701 IEM_MC_BEGIN(4, 2);
5702 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5703 IEM_MC_ARG(uint16_t, u16Src, 1);
5704 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5705 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5707
5708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5710 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5711 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5712 IEM_MC_FETCH_EFLAGS(EFlags);
5713 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5714 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5715
5716 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5717 IEM_MC_COMMIT_EFLAGS(EFlags);
5718 IEM_MC_ADVANCE_RIP();
5719 IEM_MC_END();
5720 return VINF_SUCCESS;
5721
5722 case IEMMODE_32BIT:
5723 IEM_MC_BEGIN(4, 2);
5724 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5725 IEM_MC_ARG(uint32_t, u32Src, 1);
5726 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5727 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5729
5730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5732 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5733 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5734 IEM_MC_FETCH_EFLAGS(EFlags);
5735 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5736 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5737
5738 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5739 IEM_MC_COMMIT_EFLAGS(EFlags);
5740 IEM_MC_ADVANCE_RIP();
5741 IEM_MC_END();
5742 return VINF_SUCCESS;
5743
5744 case IEMMODE_64BIT:
5745 IEM_MC_BEGIN(4, 2);
5746 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5747 IEM_MC_ARG(uint64_t, u64Src, 1);
5748 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5749 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5751
5752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5754 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5755 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5756 IEM_MC_FETCH_EFLAGS(EFlags);
5757 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5758 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5759
5760 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5761 IEM_MC_COMMIT_EFLAGS(EFlags);
5762 IEM_MC_ADVANCE_RIP();
5763 IEM_MC_END();
5764 return VINF_SUCCESS;
5765
5766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5767 }
5768 }
5769}
5770
5771
5772
5773/** Opcode 0x0f 0xa4. */
5774FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5775{
5776 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5777 IEMOP_HLP_MIN_386();
5778 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5779}
5780
5781
5782/** Opcode 0x0f 0xa5. */
5783FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5784{
5785 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5786 IEMOP_HLP_MIN_386();
5787 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5788}
5789
5790
5791/** Opcode 0x0f 0xa8. */
5792FNIEMOP_DEF(iemOp_push_gs)
5793{
5794 IEMOP_MNEMONIC(push_gs, "push gs");
5795 IEMOP_HLP_MIN_386();
5796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5797 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5798}
5799
5800
5801/** Opcode 0x0f 0xa9. */
5802FNIEMOP_DEF(iemOp_pop_gs)
5803{
5804 IEMOP_MNEMONIC(pop_gs, "pop gs");
5805 IEMOP_HLP_MIN_386();
5806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5807 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5808}
5809
5810
5811/** Opcode 0x0f 0xaa. */
5812FNIEMOP_DEF(iemOp_rsm)
5813{
5814 IEMOP_MNEMONIC(rsm, "rsm");
5815 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
5816 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
5817 * intercept). */
5818 IEMOP_BITCH_ABOUT_STUB();
5819 return IEMOP_RAISE_INVALID_OPCODE();
5820}
5821
5823
5824
5825/** Opcode 0x0f 0xab. */
5826FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5827{
5828 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5829 IEMOP_HLP_MIN_386();
5830 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5831}
5832
5833
5834/** Opcode 0x0f 0xac. */
5835FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5836{
5837 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5838 IEMOP_HLP_MIN_386();
5839 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5840}
5841
5842
5843/** Opcode 0x0f 0xad. */
5844FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5845{
5846 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5847 IEMOP_HLP_MIN_386();
5848 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5849}
5850
5851
5852/** Opcode 0x0f 0xae mem/0. */
5853FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5854{
5855 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5856 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5857 return IEMOP_RAISE_INVALID_OPCODE();
5858
5859 IEM_MC_BEGIN(3, 1);
5860 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5861 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5862 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5865 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5866 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5867 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5868 IEM_MC_END();
5869 return VINF_SUCCESS;
5870}
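
/*
 * For orientation, the 512-byte image fxsave stores (and fxrstor below
 * loads) begins like this. A trimmed sketch of the 32-bit format only; the
 * implementation works on the full X86FXSTATE structure instead:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
typedef struct FXSAVEAREA
{
    uint16_t FCW, FSW;           /* 0x00: x87 control and status words */
    uint8_t  FTW, bRsvd;         /* 0x04: abridged tag word */
    uint16_t FOP;                /* 0x06: last x87 opcode */
    uint32_t FPUIP;              /* 0x08: last instruction pointer */
    uint16_t CS, uRsvd1;         /* 0x0c */
    uint32_t FPUDP;              /* 0x10: last data pointer */
    uint16_t DS, uRsvd2;         /* 0x14 */
    uint32_t MXCSR, MXCSR_MASK;  /* 0x18: SSE control/status + writable bits */
    uint8_t  aRegs[8][16];       /* 0x20: ST(0)..ST(7), 10 bytes used per slot */
    uint8_t  aXMM[16][16];       /* 0xa0: XMM0..XMM15 (8 outside long mode) */
    uint8_t  abRsvd[96];         /* 0x1a0: reserved / software available */
} FXSAVEAREA;                    /* 512 bytes, 16-byte aligned */
#endif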
5871
5872
5873/** Opcode 0x0f 0xae mem/1. */
5874FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5875{
5876 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5877 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5878 return IEMOP_RAISE_INVALID_OPCODE();
5879
5880 IEM_MC_BEGIN(3, 1);
5881 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5882 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5883 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5886 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5887 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5888 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5889 IEM_MC_END();
5890 return VINF_SUCCESS;
5891}
5892
5893
5894/**
5895 * @opmaps grp15
5896 * @opcode !11/2
5897 * @oppfx none
5898 * @opcpuid sse
5899 * @opgroup og_sse_mxcsrsm
5900 * @opxcpttype 5
5901 * @optest op1=0 -> mxcsr=0
5902 * @optest op1=0x2083 -> mxcsr=0x2083
5903 * @optest op1=0xfffffffe -> value.xcpt=0xd
5904 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5905 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5906 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5907 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5908 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5909 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5910 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5911 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5912 */
5913FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
5914{
5915 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5916 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5917 return IEMOP_RAISE_INVALID_OPCODE();
5918
5919 IEM_MC_BEGIN(2, 0);
5920 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5921 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5924 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5925 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5926 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5927 IEM_MC_END();
5928 return VINF_SUCCESS;
5929}
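
/*
 * The @optest line expecting #GP for 0xfffffffe works because
 * iemCImpl_ldmxcsr must reject reserved MXCSR bits. A hedged sketch of that
 * check (the writable-bit mask is what fxsave reports as MXCSR_MASK;
 * 0xffff is typical on CPUs with DAZ support, 0xffbf without):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static int mxcsrCheck(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
{
    if (uNewMxCsr & ~fMxCsrMask)
        return -1;  /* reserved bit set -> raise #GP(0) */
    return 0;       /* ok to commit to the guest state */
}
#endif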
5930
5931
5932/**
5933 * @opmaps grp15
5934 * @opcode !11/3
5935 * @oppfx none
5936 * @opcpuid sse
5937 * @opgroup og_sse_mxcsrsm
5938 * @opxcpttype 5
5939 * @optest mxcsr=0 -> op1=0
5940 * @optest mxcsr=0x2083 -> op1=0x2083
5941 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5942 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5943 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5944 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
5945 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5946 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5947 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5948 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5949 */
5950FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
5951{
5952 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
5953 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
5954 return IEMOP_RAISE_INVALID_OPCODE();
5955
5956 IEM_MC_BEGIN(2, 0);
5957 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5958 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5961 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5962 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5963 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
5964 IEM_MC_END();
5965 return VINF_SUCCESS;
5966}
5967
5968
5969/**
5970 * @opmaps grp15
5971 * @opcode !11/4
5972 * @oppfx none
5973 * @opcpuid xsave
5974 * @opgroup og_system
5975 * @opxcpttype none
5976 */
5977FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
5978{
5979 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
5980 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
5981 return IEMOP_RAISE_INVALID_OPCODE();
5982
5983 IEM_MC_BEGIN(3, 0);
5984 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5985 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5986 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5989 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
5990 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5991 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
5992 IEM_MC_END();
5993 return VINF_SUCCESS;
5994}
5995
5996
5997/**
5998 * @opmaps grp15
5999 * @opcode !11/5
6000 * @oppfx none
6001 * @opcpuid xsave
6002 * @opgroup og_system
6003 * @opxcpttype none
6004 */
6005FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6006{
6007 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6008 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6009 return IEMOP_RAISE_INVALID_OPCODE();
6010
6011 IEM_MC_BEGIN(3, 0);
6012 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6013 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6014 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6017 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, like fxrstor above */
6018 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6019 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6020 IEM_MC_END();
6021 return VINF_SUCCESS;
6022}
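
/*
 * Unlike the fixed fxsave image, the xsave area is self-describing: the
 * legacy 512-byte block is followed by a 64-byte header whose XSTATE_BV
 * bitmap records which components actually hold state. A trimmed sketch of
 * that header (standard format, as consumed by xrstor):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
typedef struct XSAVEHDR
{
    uint64_t bmXState;      /* XSTATE_BV: components present in the image */
    uint64_t bmXComp;       /* XCOMP_BV: compacted-format bitmap (xsaves/xsavec) */
    uint64_t au64Rsvd[6];   /* must be zero or xrstor raises #GP */
} XSAVEHDR;                 /* located at offset 512 of the save area */
#endif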
6023
6024/** Opcode 0x0f 0xae mem/6. */
6025FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6026
6027/**
6028 * @opmaps grp15
6029 * @opcode !11/7
6030 * @oppfx none
6031 * @opcpuid clfsh
6032 * @opgroup og_cachectl
6033 * @optest op1=1 ->
6034 */
6035FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6036{
6037 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6038 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6039 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6040
6041 IEM_MC_BEGIN(2, 0);
6042 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6043 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6046 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6047 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6048 IEM_MC_END();
6049 return VINF_SUCCESS;
6050}
6051
6052/**
6053 * @opmaps grp15
6054 * @opcode !11/7
6055 * @oppfx 0x66
6056 * @opcpuid clflushopt
6057 * @opgroup og_cachectl
6058 * @optest op1=1 ->
6059 */
6060FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6061{
6062 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6063 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6064 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6065
6066 IEM_MC_BEGIN(2, 0);
6067 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6068 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6071 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6072 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6073 IEM_MC_END();
6074 return VINF_SUCCESS;
6075}
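
/*
 * Both clflush flavours share iemCImpl_clflush_clflushopt since they differ
 * only in ordering guarantees, not in what they flush: the whole cache line
 * containing the addressed byte. Conceptually the line is found like this
 * (hypothetical helper; the line size comes from CPUID leaf 1, EBX[15:8] * 8):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static uint64_t clflushLineAddr(uint64_t uGCPtr, uint32_t cbLine /* typically 64 */)
{
    return uGCPtr & ~(uint64_t)(cbLine - 1); /* round down to the line start */
}
#endif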
6076
6077
6078/** Opcode 0x0f 0xae 11b/5. */
6079FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6080{
6081 RT_NOREF_PV(bRm);
6082 IEMOP_MNEMONIC(lfence, "lfence");
6083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6084 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6085 return IEMOP_RAISE_INVALID_OPCODE();
6086
6087 IEM_MC_BEGIN(0, 0);
6088 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6089 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6090 else
6091 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6092 IEM_MC_ADVANCE_RIP();
6093 IEM_MC_END();
6094 return VINF_SUCCESS;
6095}
6096
6097
6098/** Opcode 0x0f 0xae 11b/6. */
6099FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6100{
6101 RT_NOREF_PV(bRm);
6102 IEMOP_MNEMONIC(mfence, "mfence");
6103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6104 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6105 return IEMOP_RAISE_INVALID_OPCODE();
6106
6107 IEM_MC_BEGIN(0, 0);
6108 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6109 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6110 else
6111 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6112 IEM_MC_ADVANCE_RIP();
6113 IEM_MC_END();
6114 return VINF_SUCCESS;
6115}
6116
6117
6118/** Opcode 0x0f 0xae 11b/7. */
6119FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6120{
6121 RT_NOREF_PV(bRm);
6122 IEMOP_MNEMONIC(sfence, "sfence");
6123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6124 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6125 return IEMOP_RAISE_INVALID_OPCODE();
6126
6127 IEM_MC_BEGIN(0, 0);
6128 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6129 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6130 else
6131 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6132 IEM_MC_ADVANCE_RIP();
6133 IEM_MC_END();
6134 return VINF_SUCCESS;
6135}
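
/*
 * All three fences above fall back to iemAImpl_alt_mem_fence when the host
 * lacks SSE2. The classic substitute is a locked read-modify-write, which
 * acts as a full memory barrier on x86; a sketch of the idea (hypothetical
 * GCC-style inline assembly, 32-bit host flavour):
 */
#if 0 /* illustrative sketch, not part of the build */
static void altMemFence(void)
{
    __asm__ __volatile__("lock; addl $0, (%%esp)" ::: "memory", "cc");
}
#endif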
6136
6137
6138/** Opcode 0xf3 0x0f 0xae 11b/0. */
6139FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6140
6141/** Opcode 0xf3 0x0f 0xae 11b/1. */
6142FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6143
6144/** Opcode 0xf3 0x0f 0xae 11b/2. */
6145FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6146
6147/** Opcode 0xf3 0x0f 0xae 11b/3. */
6148FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6149
6150
6151/**
6152 * Group 15 jump table for register variant.
6153 */
6154IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6155{ /* pfx: none, 066h, 0f3h, 0f2h */
6156 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6157 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6158 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6159 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6160 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6161 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6162 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6163 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6164};
6165AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6166
6167
6168/**
6169 * Group 15 jump table for memory variant.
6170 */
6171IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6172{ /* pfx: none, 066h, 0f3h, 0f2h */
6173 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6174 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6175 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6176 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6177 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6178 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6179 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6180 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6181};
6182AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6183
6184
6185/** Opcode 0x0f 0xae. */
6186FNIEMOP_DEF(iemOp_Grp15)
6187{
6188 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
6189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6190 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6191 /* register, register */
6192 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6193 + pVCpu->iem.s.idxPrefix], bRm);
6194 /* memory, register */
6195 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6196 + pVCpu->iem.s.idxPrefix], bRm);
6197}
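
/*
 * The lookup above flattens (modrm.reg, mandatory prefix) into one index:
 * each /r value owns four consecutive table slots, one per prefix column
 * (none, 066h, 0f3h, 0f2h), matching the layout of the two tables. A sketch
 * of the indexing with hypothetical names:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static unsigned grp15TableIndex(uint8_t bRm, unsigned idxPrefix /* 0..3 */)
{
    unsigned const iReg = (bRm >> 3) & 7;   /* modrm.reg field */
    return iReg * 4 + idxPrefix;            /* row * columns + column */
}
#endif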
6198
6199
6200/** Opcode 0x0f 0xaf. */
6201FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6202{
6203 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6204 IEMOP_HLP_MIN_386();
6205 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6206 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6207}
6208
6209
6210/** Opcode 0x0f 0xb0. */
6211FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6212{
6213 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6214 IEMOP_HLP_MIN_486();
6215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6216
6217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6218 {
6219 IEMOP_HLP_DONE_DECODING();
6220 IEM_MC_BEGIN(4, 0);
6221 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6222 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6223 IEM_MC_ARG(uint8_t, u8Src, 2);
6224 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6225
6226 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6227 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6228 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6229 IEM_MC_REF_EFLAGS(pEFlags);
6230 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6231 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6232 else
6233 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6234
6235 IEM_MC_ADVANCE_RIP();
6236 IEM_MC_END();
6237 }
6238 else
6239 {
6240 IEM_MC_BEGIN(4, 3);
6241 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6242 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6243 IEM_MC_ARG(uint8_t, u8Src, 2);
6244 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6246 IEM_MC_LOCAL(uint8_t, u8Al);
6247
6248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6249 IEMOP_HLP_DONE_DECODING();
6250 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6251 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6252 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6253 IEM_MC_FETCH_EFLAGS(EFlags);
6254 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6255 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6256 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6257 else
6258 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6259
6260 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6261 IEM_MC_COMMIT_EFLAGS(EFlags);
6262 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6263 IEM_MC_ADVANCE_RIP();
6264 IEM_MC_END();
6265 }
6266 return VINF_SUCCESS;
6267}
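
/*
 * The cmpxchg workers implement the usual compare-and-swap contract; note
 * that on a miss the accumulator is updated, which is why the memory path
 * above stores AL back unconditionally. A plain C model of the byte variant
 * (only ZF shown; the real helpers also set CF/PF/AF/SF/OF as for CMP):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static void cmpxchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Dst == *pu8Al)
    {
        *pu8Dst    = u8Src;          /* equal: destination <- source */
        *pfEFlags |= 1u << 6;        /* ZF = 1 */
    }
    else
    {
        *pu8Al     = *pu8Dst;        /* not equal: accumulator <- destination */
        *pfEFlags &= ~(1u << 6);     /* ZF = 0 */
    }
}
#endif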
6268
6269/** Opcode 0x0f 0xb1. */
6270FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6271{
6272 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6273 IEMOP_HLP_MIN_486();
6274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6275
6276 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6277 {
6278 IEMOP_HLP_DONE_DECODING();
6279 switch (pVCpu->iem.s.enmEffOpSize)
6280 {
6281 case IEMMODE_16BIT:
6282 IEM_MC_BEGIN(4, 0);
6283 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6284 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6285 IEM_MC_ARG(uint16_t, u16Src, 2);
6286 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6287
6288 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6289 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6290 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6291 IEM_MC_REF_EFLAGS(pEFlags);
6292 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6293 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6294 else
6295 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6296
6297 IEM_MC_ADVANCE_RIP();
6298 IEM_MC_END();
6299 return VINF_SUCCESS;
6300
6301 case IEMMODE_32BIT:
6302 IEM_MC_BEGIN(4, 0);
6303 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6304 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6305 IEM_MC_ARG(uint32_t, u32Src, 2);
6306 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6307
6308 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6309 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6310 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6311 IEM_MC_REF_EFLAGS(pEFlags);
6312 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6313 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6314 else
6315 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6316
6317 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6318 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 return VINF_SUCCESS;
6322
6323 case IEMMODE_64BIT:
6324 IEM_MC_BEGIN(4, 0);
6325 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6326 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6327#ifdef RT_ARCH_X86
6328 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6329#else
6330 IEM_MC_ARG(uint64_t, u64Src, 2);
6331#endif
6332 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6333
6334 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6335 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6336 IEM_MC_REF_EFLAGS(pEFlags);
6337#ifdef RT_ARCH_X86
6338 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6339 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6340 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6341 else
6342 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6343#else
6344 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6345 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6346 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6347 else
6348 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6349#endif
6350
6351 IEM_MC_ADVANCE_RIP();
6352 IEM_MC_END();
6353 return VINF_SUCCESS;
6354
6355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6356 }
6357 }
6358 else
6359 {
6360 switch (pVCpu->iem.s.enmEffOpSize)
6361 {
6362 case IEMMODE_16BIT:
6363 IEM_MC_BEGIN(4, 3);
6364 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6365 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6366 IEM_MC_ARG(uint16_t, u16Src, 2);
6367 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6369 IEM_MC_LOCAL(uint16_t, u16Ax);
6370
6371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6372 IEMOP_HLP_DONE_DECODING();
6373 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6374 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6375 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6376 IEM_MC_FETCH_EFLAGS(EFlags);
6377 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6378 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6379 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6380 else
6381 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6382
6383 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6384 IEM_MC_COMMIT_EFLAGS(EFlags);
6385 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6386 IEM_MC_ADVANCE_RIP();
6387 IEM_MC_END();
6388 return VINF_SUCCESS;
6389
6390 case IEMMODE_32BIT:
6391 IEM_MC_BEGIN(4, 3);
6392 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6393 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6394 IEM_MC_ARG(uint32_t, u32Src, 2);
6395 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6397 IEM_MC_LOCAL(uint32_t, u32Eax);
6398
6399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6400 IEMOP_HLP_DONE_DECODING();
6401 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6402 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6403 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6404 IEM_MC_FETCH_EFLAGS(EFlags);
6405 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6406 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6407 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6408 else
6409 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6410
6411 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6412 IEM_MC_COMMIT_EFLAGS(EFlags);
6413 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6414 IEM_MC_ADVANCE_RIP();
6415 IEM_MC_END();
6416 return VINF_SUCCESS;
6417
6418 case IEMMODE_64BIT:
6419 IEM_MC_BEGIN(4, 3);
6420 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6421 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6422#ifdef RT_ARCH_X86
6423 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6424#else
6425 IEM_MC_ARG(uint64_t, u64Src, 2);
6426#endif
6427 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6429 IEM_MC_LOCAL(uint64_t, u64Rax);
6430
6431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6432 IEMOP_HLP_DONE_DECODING();
6433 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6434 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6435 IEM_MC_FETCH_EFLAGS(EFlags);
6436 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6437#ifdef RT_ARCH_X86
6438 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6439 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6440 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6441 else
6442 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6443#else
6444 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6445 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6446 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6447 else
6448 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6449#endif
6450
6451 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6452 IEM_MC_COMMIT_EFLAGS(EFlags);
6453 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6454 IEM_MC_ADVANCE_RIP();
6455 IEM_MC_END();
6456 return VINF_SUCCESS;
6457
6458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6459 }
6460 }
6461}
6462
6463
6464FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6465{
6466 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6467 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6468
6469 switch (pVCpu->iem.s.enmEffOpSize)
6470 {
6471 case IEMMODE_16BIT:
6472 IEM_MC_BEGIN(5, 1);
6473 IEM_MC_ARG(uint16_t, uSel, 0);
6474 IEM_MC_ARG(uint16_t, offSeg, 1);
6475 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6476 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6477 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6478 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6481 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6482 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6483 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6484 IEM_MC_END();
6485 return VINF_SUCCESS;
6486
6487 case IEMMODE_32BIT:
6488 IEM_MC_BEGIN(5, 1);
6489 IEM_MC_ARG(uint16_t, uSel, 0);
6490 IEM_MC_ARG(uint32_t, offSeg, 1);
6491 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6492 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6493 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6494 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6497 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6498 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6499 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6500 IEM_MC_END();
6501 return VINF_SUCCESS;
6502
6503 case IEMMODE_64BIT:
6504 IEM_MC_BEGIN(5, 1);
6505 IEM_MC_ARG(uint16_t, uSel, 0);
6506 IEM_MC_ARG(uint64_t, offSeg, 1);
6507 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6508 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6509 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6510 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6513 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
6514 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6515 else
6516 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6517 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6518 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6519 IEM_MC_END();
6520 return VINF_SUCCESS;
6521
6522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6523 }
6524}
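
/*
 * The two displaced fetches above read an Mp far pointer: the offset comes
 * first in memory and the 16-bit selector immediately follows it (at +2, +4
 * or +8 depending on operand size). Sketch of the 32-bit operand-size layout:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
# pragma pack(push, 1)
typedef struct FARPTR32
{
    uint32_t off;   /* goes to the general register */
    uint16_t sel;   /* goes to the segment register */
} FARPTR32;
# pragma pack(pop)
#endif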
6525
6526
6527/** Opcode 0x0f 0xb2. */
6528FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6529{
6530 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6531 IEMOP_HLP_MIN_386();
6532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6533 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6534 return IEMOP_RAISE_INVALID_OPCODE();
6535 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6536}
6537
6538
6539/** Opcode 0x0f 0xb3. */
6540FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6541{
6542 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6543 IEMOP_HLP_MIN_386();
6544 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6545}
6546
6547
6548/** Opcode 0x0f 0xb4. */
6549FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6550{
6551 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6552 IEMOP_HLP_MIN_386();
6553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6554 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6555 return IEMOP_RAISE_INVALID_OPCODE();
6556 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6557}
6558
6559
6560/** Opcode 0x0f 0xb5. */
6561FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6562{
6563 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6564 IEMOP_HLP_MIN_386();
6565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6566 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6567 return IEMOP_RAISE_INVALID_OPCODE();
6568 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6569}
6570
6571
6572/** Opcode 0x0f 0xb6. */
6573FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6574{
6575 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6576 IEMOP_HLP_MIN_386();
6577
6578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6579
6580 /*
6581 * If rm denotes a register, there are no more instruction bytes.
6582 */
6583 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6584 {
6585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6586 switch (pVCpu->iem.s.enmEffOpSize)
6587 {
6588 case IEMMODE_16BIT:
6589 IEM_MC_BEGIN(0, 1);
6590 IEM_MC_LOCAL(uint16_t, u16Value);
6591 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6592 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6593 IEM_MC_ADVANCE_RIP();
6594 IEM_MC_END();
6595 return VINF_SUCCESS;
6596
6597 case IEMMODE_32BIT:
6598 IEM_MC_BEGIN(0, 1);
6599 IEM_MC_LOCAL(uint32_t, u32Value);
6600 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6601 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6602 IEM_MC_ADVANCE_RIP();
6603 IEM_MC_END();
6604 return VINF_SUCCESS;
6605
6606 case IEMMODE_64BIT:
6607 IEM_MC_BEGIN(0, 1);
6608 IEM_MC_LOCAL(uint64_t, u64Value);
6609 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6610 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6611 IEM_MC_ADVANCE_RIP();
6612 IEM_MC_END();
6613 return VINF_SUCCESS;
6614
6615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6616 }
6617 }
6618 else
6619 {
6620 /*
6621 * We're loading a register from memory.
6622 */
6623 switch (pVCpu->iem.s.enmEffOpSize)
6624 {
6625 case IEMMODE_16BIT:
6626 IEM_MC_BEGIN(0, 2);
6627 IEM_MC_LOCAL(uint16_t, u16Value);
6628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6631 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6632 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6633 IEM_MC_ADVANCE_RIP();
6634 IEM_MC_END();
6635 return VINF_SUCCESS;
6636
6637 case IEMMODE_32BIT:
6638 IEM_MC_BEGIN(0, 2);
6639 IEM_MC_LOCAL(uint32_t, u32Value);
6640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6644 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6645 IEM_MC_ADVANCE_RIP();
6646 IEM_MC_END();
6647 return VINF_SUCCESS;
6648
6649 case IEMMODE_64BIT:
6650 IEM_MC_BEGIN(0, 2);
6651 IEM_MC_LOCAL(uint64_t, u64Value);
6652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6655 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6656 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6657 IEM_MC_ADVANCE_RIP();
6658 IEM_MC_END();
6659 return VINF_SUCCESS;
6660
6661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6662 }
6663 }
6664}
6665
6666
6667/** Opcode 0x0f 0xb7. */
6668FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6669{
6670 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6671 IEMOP_HLP_MIN_386();
6672
6673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6674
6675 /** @todo Not entirely sure how the operand size prefix is handled here,
6676 * assuming that it will be ignored. Would be nice to have a few
6677 * tests for this. */
6678 /*
6679 * If rm denotes a register, there are no more instruction bytes.
6680 */
6681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6682 {
6683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6684 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6685 {
6686 IEM_MC_BEGIN(0, 1);
6687 IEM_MC_LOCAL(uint32_t, u32Value);
6688 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6689 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6690 IEM_MC_ADVANCE_RIP();
6691 IEM_MC_END();
6692 }
6693 else
6694 {
6695 IEM_MC_BEGIN(0, 1);
6696 IEM_MC_LOCAL(uint64_t, u64Value);
6697 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6698 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6699 IEM_MC_ADVANCE_RIP();
6700 IEM_MC_END();
6701 }
6702 }
6703 else
6704 {
6705 /*
6706 * We're loading a register from memory.
6707 */
6708 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6709 {
6710 IEM_MC_BEGIN(0, 2);
6711 IEM_MC_LOCAL(uint32_t, u32Value);
6712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6715 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6716 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6717 IEM_MC_ADVANCE_RIP();
6718 IEM_MC_END();
6719 }
6720 else
6721 {
6722 IEM_MC_BEGIN(0, 2);
6723 IEM_MC_LOCAL(uint64_t, u64Value);
6724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6727 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6728 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6729 IEM_MC_ADVANCE_RIP();
6730 IEM_MC_END();
6731 }
6732 }
6733 return VINF_SUCCESS;
6734}
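
/*
 * Note that only the 64-bit cases need a 64-bit store: IEM_MC_STORE_GREG_U32
 * already clears bits 63:32, mirroring the hardware rule that 32-bit writes
 * zero-extend. In plain C each movzx case reduces to an unsigned widening
 * cast, e.g.:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static uint64_t movzxU16ToU64(uint16_t u16Src)
{
    return (uint64_t)u16Src; /* unsigned widening == zero extension */
}
#endif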
6735
6736
6737/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6738FNIEMOP_UD_STUB(iemOp_jmpe);
6739/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6740FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6741
6742
6743/**
6744 * @opcode 0xb9
6745 * @opinvalid intel-modrm
6746 * @optest ->
6747 */
6748FNIEMOP_DEF(iemOp_Grp10)
6749{
6750 /*
6751 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
6752 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6753 */
6754 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6755 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6756 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6757}
6758
6759
6760/** Opcode 0x0f 0xba. */
6761FNIEMOP_DEF(iemOp_Grp8)
6762{
6763 IEMOP_HLP_MIN_386();
6764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6765 PCIEMOPBINSIZES pImpl;
6766 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6767 {
6768 case 0: case 1: case 2: case 3:
6769 /* Both AMD and Intel want full modr/m decoding and imm8. */
6770 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6771 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6772 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6773 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6774 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6776 }
6777 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6778
6779 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6780 {
6781 /* register destination. */
6782 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6784
6785 switch (pVCpu->iem.s.enmEffOpSize)
6786 {
6787 case IEMMODE_16BIT:
6788 IEM_MC_BEGIN(3, 0);
6789 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6790 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6791 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6792
6793 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6794 IEM_MC_REF_EFLAGS(pEFlags);
6795 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6796
6797 IEM_MC_ADVANCE_RIP();
6798 IEM_MC_END();
6799 return VINF_SUCCESS;
6800
6801 case IEMMODE_32BIT:
6802 IEM_MC_BEGIN(3, 0);
6803 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6804 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6806
6807 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6808 IEM_MC_REF_EFLAGS(pEFlags);
6809 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6810
6811 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6812 IEM_MC_ADVANCE_RIP();
6813 IEM_MC_END();
6814 return VINF_SUCCESS;
6815
6816 case IEMMODE_64BIT:
6817 IEM_MC_BEGIN(3, 0);
6818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6819 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6820 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6821
6822 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6823 IEM_MC_REF_EFLAGS(pEFlags);
6824 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6825
6826 IEM_MC_ADVANCE_RIP();
6827 IEM_MC_END();
6828 return VINF_SUCCESS;
6829
6830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6831 }
6832 }
6833 else
6834 {
6835 /* memory destination. */
6836
6837 uint32_t fAccess;
6838 if (pImpl->pfnLockedU16)
6839 fAccess = IEM_ACCESS_DATA_RW;
6840 else /* BT */
6841 fAccess = IEM_ACCESS_DATA_R;
6842
6843 /** @todo test negative bit offsets! */
6844 switch (pVCpu->iem.s.enmEffOpSize)
6845 {
6846 case IEMMODE_16BIT:
6847 IEM_MC_BEGIN(3, 1);
6848 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6849 IEM_MC_ARG(uint16_t, u16Src, 1);
6850 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6852
6853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6854 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6855 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6856 if (pImpl->pfnLockedU16)
6857 IEMOP_HLP_DONE_DECODING();
6858 else
6859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6860 IEM_MC_FETCH_EFLAGS(EFlags);
6861 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6862 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6863 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6864 else
6865 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6866 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6867
6868 IEM_MC_COMMIT_EFLAGS(EFlags);
6869 IEM_MC_ADVANCE_RIP();
6870 IEM_MC_END();
6871 return VINF_SUCCESS;
6872
6873 case IEMMODE_32BIT:
6874 IEM_MC_BEGIN(3, 1);
6875 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6876 IEM_MC_ARG(uint32_t, u32Src, 1);
6877 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6879
6880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6881 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6882 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6883 if (pImpl->pfnLockedU16)
6884 IEMOP_HLP_DONE_DECODING();
6885 else
6886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6887 IEM_MC_FETCH_EFLAGS(EFlags);
6888 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6889 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6890 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6891 else
6892 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6893 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6894
6895 IEM_MC_COMMIT_EFLAGS(EFlags);
6896 IEM_MC_ADVANCE_RIP();
6897 IEM_MC_END();
6898 return VINF_SUCCESS;
6899
6900 case IEMMODE_64BIT:
6901 IEM_MC_BEGIN(3, 1);
6902 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6903 IEM_MC_ARG(uint64_t, u64Src, 1);
6904 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6906
6907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6908 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6909 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6910 if (pImpl->pfnLockedU16)
6911 IEMOP_HLP_DONE_DECODING();
6912 else
6913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6914 IEM_MC_FETCH_EFLAGS(EFlags);
6915 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6916 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6917 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6918 else
6919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6920 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6921
6922 IEM_MC_COMMIT_EFLAGS(EFlags);
6923 IEM_MC_ADVANCE_RIP();
6924 IEM_MC_END();
6925 return VINF_SUCCESS;
6926
6927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6928 }
6929 }
6930}
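
/*
 * With an immediate bit offset the CPU masks the offset to the operand width
 * and never adjusts the effective address, unlike the register-offset forms
 * further up where the offset can index outside the addressed word; hence
 * the plain "u8Bit & 0x0f/0x1f/0x3f" above. Sketch of the 16-bit bt Ev,Ib
 * case (hypothetical helper):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static int btEvIbU16(uint16_t const *pu16Dst, uint8_t u8Bit)
{
    return (*pu16Dst >> (u8Bit & 0x0f)) & 1; /* result lands in CF */
}
#endif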
6931
6932
6933/** Opcode 0x0f 0xbb. */
6934FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6935{
6936 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6937 IEMOP_HLP_MIN_386();
6938 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6939}
6940
6941
6942/** Opcode 0x0f 0xbc. */
6943FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6944{
6945 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6946 IEMOP_HLP_MIN_386();
6947 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6948 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6949}
6950
6951
6952/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6953FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6954
6955
6956/** Opcode 0x0f 0xbd. */
6957FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6958{
6959 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6960 IEMOP_HLP_MIN_386();
6961 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6962 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6963}
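
/*
 * Both scans reuse the rv_rm binary-operator plumbing; the interesting part
 * is the flag contract: when the source is zero, ZF is set and the
 * destination is architecturally undefined (left unchanged on many CPUs).
 * A C model of bsf (bsr scans from the most significant end instead):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
static int bsfU32(uint32_t u32Src, uint32_t *pu32Dst /* untouched when src == 0 */)
{
    if (!u32Src)
        return 1;                   /* ZF = 1, destination undefined */
    unsigned iBit = 0;
    while (!(u32Src & 1))           /* find the least significant set bit */
    {
        u32Src >>= 1;
        iBit++;
    }
    *pu32Dst = iBit;
    return 0;                       /* ZF = 0 */
}
#endif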
6964
6965
6966/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6967FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6968
6969
6970/** Opcode 0x0f 0xbe. */
6971FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6972{
6973 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6974 IEMOP_HLP_MIN_386();
6975
6976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6977
6978 /*
6979 * If rm is denoting a register, no more instruction bytes.
6980 */
6981 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6982 {
6983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6984 switch (pVCpu->iem.s.enmEffOpSize)
6985 {
6986 case IEMMODE_16BIT:
6987 IEM_MC_BEGIN(0, 1);
6988 IEM_MC_LOCAL(uint16_t, u16Value);
6989 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6990 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6991 IEM_MC_ADVANCE_RIP();
6992 IEM_MC_END();
6993 return VINF_SUCCESS;
6994
6995 case IEMMODE_32BIT:
6996 IEM_MC_BEGIN(0, 1);
6997 IEM_MC_LOCAL(uint32_t, u32Value);
6998 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6999 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7000 IEM_MC_ADVANCE_RIP();
7001 IEM_MC_END();
7002 return VINF_SUCCESS;
7003
7004 case IEMMODE_64BIT:
7005 IEM_MC_BEGIN(0, 1);
7006 IEM_MC_LOCAL(uint64_t, u64Value);
7007 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7008 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7009 IEM_MC_ADVANCE_RIP();
7010 IEM_MC_END();
7011 return VINF_SUCCESS;
7012
7013 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7014 }
7015 }
7016 else
7017 {
7018 /*
7019 * We're loading a register from memory.
7020 */
7021 switch (pVCpu->iem.s.enmEffOpSize)
7022 {
7023 case IEMMODE_16BIT:
7024 IEM_MC_BEGIN(0, 2);
7025 IEM_MC_LOCAL(uint16_t, u16Value);
7026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7029 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7030 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7031 IEM_MC_ADVANCE_RIP();
7032 IEM_MC_END();
7033 return VINF_SUCCESS;
7034
7035 case IEMMODE_32BIT:
7036 IEM_MC_BEGIN(0, 2);
7037 IEM_MC_LOCAL(uint32_t, u32Value);
7038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7041 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7042 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7043 IEM_MC_ADVANCE_RIP();
7044 IEM_MC_END();
7045 return VINF_SUCCESS;
7046
7047 case IEMMODE_64BIT:
7048 IEM_MC_BEGIN(0, 2);
7049 IEM_MC_LOCAL(uint64_t, u64Value);
7050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7054 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7055 IEM_MC_ADVANCE_RIP();
7056 IEM_MC_END();
7057 return VINF_SUCCESS;
7058
7059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7060 }
7061 }
7062}
7063
7064
7065/** Opcode 0x0f 0xbf. */
7066FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7067{
7068 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7069 IEMOP_HLP_MIN_386();
7070
7071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7072
7073 /** @todo Not entirely sure how the operand size prefix is handled here,
7074 * assuming that it will be ignored. Would be nice to have a few
7075 * tests for this. */
7076 /*
7077 * If rm is denoting a register, no more instruction bytes.
7078 */
7079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7080 {
7081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7082 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7083 {
7084 IEM_MC_BEGIN(0, 1);
7085 IEM_MC_LOCAL(uint32_t, u32Value);
7086 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7087 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7088 IEM_MC_ADVANCE_RIP();
7089 IEM_MC_END();
7090 }
7091 else
7092 {
7093 IEM_MC_BEGIN(0, 1);
7094 IEM_MC_LOCAL(uint64_t, u64Value);
7095 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7096 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7097 IEM_MC_ADVANCE_RIP();
7098 IEM_MC_END();
7099 }
7100 }
7101 else
7102 {
7103 /*
7104 * We're loading a register from memory.
7105 */
7106 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7107 {
7108 IEM_MC_BEGIN(0, 2);
7109 IEM_MC_LOCAL(uint32_t, u32Value);
7110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7113 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7114 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7115 IEM_MC_ADVANCE_RIP();
7116 IEM_MC_END();
7117 }
7118 else
7119 {
7120 IEM_MC_BEGIN(0, 2);
7121 IEM_MC_LOCAL(uint64_t, u64Value);
7122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7125 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7126 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7127 IEM_MC_ADVANCE_RIP();
7128 IEM_MC_END();
7129 }
7130 }
7131 return VINF_SUCCESS;
7132}
7133
7134
7135/** Opcode 0x0f 0xc0. */
7136FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7137{
7138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7139 IEMOP_HLP_MIN_486();
7140 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7141
7142 /*
7143 * If rm is denoting a register, no more instruction bytes.
7144 */
7145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7146 {
7147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7148
7149 IEM_MC_BEGIN(3, 0);
7150 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7151 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7152 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7153
7154 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7155 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7156 IEM_MC_REF_EFLAGS(pEFlags);
7157 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7158
7159 IEM_MC_ADVANCE_RIP();
7160 IEM_MC_END();
7161 }
7162 else
7163 {
7164 /*
7165 * We're accessing memory.
7166 */
7167 IEM_MC_BEGIN(3, 3);
7168 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7169 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7170 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7171 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7173
7174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7175 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7176 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7177 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7178 IEM_MC_FETCH_EFLAGS(EFlags);
7179 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7180 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7181 else
7182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7183
7184 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7185 IEM_MC_COMMIT_EFLAGS(EFlags);
7186 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7187 IEM_MC_ADVANCE_RIP();
7188 IEM_MC_END();
7189 return VINF_SUCCESS;
7190 }
7191 return VINF_SUCCESS;
7192}
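/*
 * A minimal sketch (disabled code, invented helper name) of what the xadd
 * workers called above implement: the destination receives the sum and the
 * source register receives the old destination value, with EFLAGS updated
 * as for an ordinary add.
 */
#if 0
static void xaddU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pfEFlags)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = (uint8_t)(u8OldDst + *pu8Reg); /* destination = old dst + src */
    *pu8Reg = u8OldDst;                      /* source register = old dst */
    RT_NOREF(pfEFlags); /* the real worker updates the arithmetic flags here */
}
#endif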
7193
7194
7195/** Opcode 0x0f 0xc1. */
7196FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7197{
7198 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7199 IEMOP_HLP_MIN_486();
7200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7201
7202 /*
7203 * If rm is denoting a register, no more instruction bytes.
7204 */
7205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7206 {
7207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7208
7209 switch (pVCpu->iem.s.enmEffOpSize)
7210 {
7211 case IEMMODE_16BIT:
7212 IEM_MC_BEGIN(3, 0);
7213 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7214 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7216
7217 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7218 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7219 IEM_MC_REF_EFLAGS(pEFlags);
7220 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7221
7222 IEM_MC_ADVANCE_RIP();
7223 IEM_MC_END();
7224 return VINF_SUCCESS;
7225
7226 case IEMMODE_32BIT:
7227 IEM_MC_BEGIN(3, 0);
7228 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7229 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7230 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7231
7232 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7233 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7234 IEM_MC_REF_EFLAGS(pEFlags);
7235 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7236
7237 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7238 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7239 IEM_MC_ADVANCE_RIP();
7240 IEM_MC_END();
7241 return VINF_SUCCESS;
7242
7243 case IEMMODE_64BIT:
7244 IEM_MC_BEGIN(3, 0);
7245 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7246 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7247 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7248
7249 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7250 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7251 IEM_MC_REF_EFLAGS(pEFlags);
7252 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7253
7254 IEM_MC_ADVANCE_RIP();
7255 IEM_MC_END();
7256 return VINF_SUCCESS;
7257
7258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7259 }
7260 }
7261 else
7262 {
7263 /*
7264 * We're accessing memory.
7265 */
7266 switch (pVCpu->iem.s.enmEffOpSize)
7267 {
7268 case IEMMODE_16BIT:
7269 IEM_MC_BEGIN(3, 3);
7270 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7271 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7272 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7273 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7275
7276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7277 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7278 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7279 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7280 IEM_MC_FETCH_EFLAGS(EFlags);
7281 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7283 else
7284 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7285
7286 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7287 IEM_MC_COMMIT_EFLAGS(EFlags);
7288 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7289 IEM_MC_ADVANCE_RIP();
7290 IEM_MC_END();
7291 return VINF_SUCCESS;
7292
7293 case IEMMODE_32BIT:
7294 IEM_MC_BEGIN(3, 3);
7295 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7296 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7297 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7298 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7300
7301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7302 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7303 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7304 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7305 IEM_MC_FETCH_EFLAGS(EFlags);
7306 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7307 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7308 else
7309 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7310
7311 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7312 IEM_MC_COMMIT_EFLAGS(EFlags);
7313 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7314 IEM_MC_ADVANCE_RIP();
7315 IEM_MC_END();
7316 return VINF_SUCCESS;
7317
7318 case IEMMODE_64BIT:
7319 IEM_MC_BEGIN(3, 3);
7320 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7321 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7322 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7323 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7325
7326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7327 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7328 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7329 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7330 IEM_MC_FETCH_EFLAGS(EFlags);
7331 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7332 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7333 else
7334 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7335
7336 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7337 IEM_MC_COMMIT_EFLAGS(EFlags);
7338 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7339 IEM_MC_ADVANCE_RIP();
7340 IEM_MC_END();
7341 return VINF_SUCCESS;
7342
7343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7344 }
7345 }
7346}
7347
7348
7349/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7350FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7351/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7352FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7353/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7354FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7355/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7356FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7357
7358
7359/** Opcode 0x0f 0xc3. */
7360FNIEMOP_DEF(iemOp_movnti_My_Gy)
7361{
7362 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7363
7364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7365
7366 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7367 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7368 {
7369 switch (pVCpu->iem.s.enmEffOpSize)
7370 {
7371 case IEMMODE_32BIT:
7372 IEM_MC_BEGIN(0, 2);
7373 IEM_MC_LOCAL(uint32_t, u32Value);
7374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7375
7376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7378 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7379 return IEMOP_RAISE_INVALID_OPCODE();
7380
7381 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7382 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7383 IEM_MC_ADVANCE_RIP();
7384 IEM_MC_END();
7385 break;
7386
7387 case IEMMODE_64BIT:
7388 IEM_MC_BEGIN(0, 2);
7389 IEM_MC_LOCAL(uint64_t, u64Value);
7390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7391
7392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7394 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7395 return IEMOP_RAISE_INVALID_OPCODE();
7396
7397 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7398 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7399 IEM_MC_ADVANCE_RIP();
7400 IEM_MC_END();
7401 break;
7402
7403 case IEMMODE_16BIT:
7404 /** @todo check this form. */
7405 return IEMOP_RAISE_INVALID_OPCODE();
7406 }
7407 }
7408 else
7409 return IEMOP_RAISE_INVALID_OPCODE();
7410 return VINF_SUCCESS;
7411}
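/* Note: the non-temporal hint of movnti is not modelled above; IEM performs a
   plain store, which is architecturally acceptable since the hint only affects
   cache behaviour. */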
7412/* Opcode 0x66 0x0f 0xc3 - invalid */
7413/* Opcode 0xf3 0x0f 0xc3 - invalid */
7414/* Opcode 0xf2 0x0f 0xc3 - invalid */
7415
7416/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7417FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7418/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7419FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7420/* Opcode 0xf3 0x0f 0xc4 - invalid */
7421/* Opcode 0xf2 0x0f 0xc4 - invalid */
7422
7423/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7424FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7425/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7426FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7427/* Opcode 0xf3 0x0f 0xc5 - invalid */
7428/* Opcode 0xf2 0x0f 0xc5 - invalid */
7429
7430/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7431FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7432/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7433FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7434/* Opcode 0xf3 0x0f 0xc6 - invalid */
7435/* Opcode 0xf2 0x0f 0xc6 - invalid */
7436
7437
7438/** Opcode 0x0f 0xc7 !11/1. */
7439FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7440{
7441 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7442
7443 IEM_MC_BEGIN(4, 3);
7444 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7445 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7446 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7447 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7448 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7449 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7451
7452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7453 IEMOP_HLP_DONE_DECODING();
7454 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7455
7456 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7457 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7458 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7459
7460 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7461 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7462 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7463
7464 IEM_MC_FETCH_EFLAGS(EFlags);
7465 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7466 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7467 else
7468 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7469
7470 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7471 IEM_MC_COMMIT_EFLAGS(EFlags);
7472 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7473 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7474 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7475 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7476 IEM_MC_ENDIF();
7477 IEM_MC_ADVANCE_RIP();
7478
7479 IEM_MC_END();
7480 return VINF_SUCCESS;
7481}
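/*
 * A minimal sketch (disabled code, invented name) of the compare-and-exchange
 * the mapped-memory path above performs: EDX:EAX is compared with the 64-bit
 * destination; on a match ZF is set and ECX:EBX is stored, otherwise ZF is
 * cleared and the old value is loaded into EDX:EAX.
 */
#if 0
static void cmpxchg8bSketch(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PCRTUINT64U pu64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Dst == pu64EaxEdx->u)
    {
        *pu64Dst = pu64EbxEcx->u;
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64Dst;
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif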
7482
7483
7484/** Opcode REX.W 0x0f 0xc7 !11/1. */
7485FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7486{
7487 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7488 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7489 {
7490#if 0
7491 RT_NOREF(bRm);
7492 IEMOP_BITCH_ABOUT_STUB();
7493 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7494#else
7495 IEM_MC_BEGIN(4, 3);
7496 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7497 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7498 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7499 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7500 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7501 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7503
7504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7505 IEMOP_HLP_DONE_DECODING();
7506 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7507 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7508
7509 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7510 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7511 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7512
7513 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7514 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7515 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7516
7517 IEM_MC_FETCH_EFLAGS(EFlags);
7518# ifdef RT_ARCH_AMD64
7519 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7520 {
7521 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7522 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7523 else
7524 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7525 }
7526 else
7527# endif
7528 {
7529 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7530 accesses that are not all atomic, which works fine in a uni-CPU guest
7531 configuration (ignoring DMA). If guest SMP is active we have no choice
7532 but to use a rendezvous callback here. Sigh. */
7533 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7534 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7535 else
7536 {
7537 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7538 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7539 }
7540 }
7541
7542 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7543 IEM_MC_COMMIT_EFLAGS(EFlags);
7544 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7545 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7546 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7547 IEM_MC_ENDIF();
7548 IEM_MC_ADVANCE_RIP();
7549
7550 IEM_MC_END();
7551 return VINF_SUCCESS;
7552#endif
7553 }
7554 Log(("cmpxchg16b -> #UD\n"));
7555 return IEMOP_RAISE_INVALID_OPCODE();
7556}
7557
7558FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7559{
7560 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7561 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7562 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7563}
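/* E.g. 0f c7 0b decodes as cmpxchg8b [rbx], while 48 0f c7 0b (REX.W set)
   decodes as cmpxchg16b [rbx]. */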
7564
7565/** Opcode 0x0f 0xc7 11/6. */
7566FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7567
7568/** Opcode 0x0f 0xc7 !11/6. */
7569FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7570
7571/** Opcode 0x66 0x0f 0xc7 !11/6. */
7572FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7573
7574/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7575FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7576
7577/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7578FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7579
7580/** Opcode 0x0f 0xc7 11/7. */
7581FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7582
7583
7584/**
7585 * Group 9 jump table for register variant.
7586 */
7587IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7588{ /* pfx: none, 066h, 0f3h, 0f2h */
7589 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7590 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7591 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7592 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7593 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7594 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7595 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7596 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7597};
7598AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7599
7600
7601/**
7602 * Group 9 jump table for memory variant.
7603 */
7604IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7605{ /* pfx: none, 066h, 0f3h, 0f2h */
7606 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7607 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7608 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7609 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7610 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7611 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7612 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7613 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7614};
7615AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7616
7617
7618/** Opcode 0x0f 0xc7. */
7619FNIEMOP_DEF(iemOp_Grp9)
7620{
7621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7623 /* register, register */
7624 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7625 + pVCpu->iem.s.idxPrefix], bRm);
7626 /* memory, register */
7627 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7628 + pVCpu->iem.s.idxPrefix], bRm);
7629}
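/* Worked example of the dispatch above: 0f c7 /6 with a memory operand and no
   prefix gives index 6*4 + 0 into g_apfnGroup9MemReg, i.e. vmptrld; a 066h
   prefix selects vmclear and an 0f3h prefix vmxon, per the table columns. */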
7630
7631
7632/**
7633 * Common 'bswap register' helper.
7634 */
7635FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7636{
7637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7638 switch (pVCpu->iem.s.enmEffOpSize)
7639 {
7640 case IEMMODE_16BIT:
7641 IEM_MC_BEGIN(1, 0);
7642 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7643 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7644 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7645 IEM_MC_ADVANCE_RIP();
7646 IEM_MC_END();
7647 return VINF_SUCCESS;
7648
7649 case IEMMODE_32BIT:
7650 IEM_MC_BEGIN(1, 0);
7651 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7652 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7653 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7654 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7655 IEM_MC_ADVANCE_RIP();
7656 IEM_MC_END();
7657 return VINF_SUCCESS;
7658
7659 case IEMMODE_64BIT:
7660 IEM_MC_BEGIN(1, 0);
7661 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7662 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7663 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7664 IEM_MC_ADVANCE_RIP();
7665 IEM_MC_END();
7666 return VINF_SUCCESS;
7667
7668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7669 }
7670}
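/*
 * A minimal sketch (disabled code, invented name) of the byte-order reversal
 * the 32-bit worker above performs; the real assembly workers do the same
 * without touching any flags.
 */
#if 0
static void bswapU32Sketch(uint32_t *pu32Dst)
{
    uint32_t const u = *pu32Dst;
    *pu32Dst = (u << 24)
             | ((u & UINT32_C(0x0000ff00)) << 8)
             | ((u >> 8) & UINT32_C(0x0000ff00))
             | (u >> 24);
}
#endif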
7671
7672
7673/** Opcode 0x0f 0xc8. */
7674FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7675{
7676 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7677 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7678 prefix, but REX.B appears to be the correct one. For a parallel
7679 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7680 IEMOP_HLP_MIN_486();
7681 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7682}
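/* E.g. 0f c8 is bswap eax, while 41 0f c8 (REX.B) is bswap r8d; a REX.X
   prefix (42 0f c8) should leave the operand as eax. */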
7683
7684
7685/** Opcode 0x0f 0xc9. */
7686FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7687{
7688 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7689 IEMOP_HLP_MIN_486();
7690 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7691}
7692
7693
7694/** Opcode 0x0f 0xca. */
7695FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7696{
7697 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7698 IEMOP_HLP_MIN_486();
7699 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7700}
7701
7702
7703/** Opcode 0x0f 0xcb. */
7704FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7705{
7706 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7707 IEMOP_HLP_MIN_486();
7708 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7709}
7710
7711
7712/** Opcode 0x0f 0xcc. */
7713FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7714{
7715 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7716 IEMOP_HLP_MIN_486();
7717 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7718}
7719
7720
7721/** Opcode 0x0f 0xcd. */
7722FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7723{
7724 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7725 IEMOP_HLP_MIN_486();
7726 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7727}
7728
7729
7730/** Opcode 0x0f 0xce. */
7731FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7732{
7733 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7734 IEMOP_HLP_MIN_486();
7735 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7736}
7737
7738
7739/** Opcode 0x0f 0xcf. */
7740FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7741{
7742 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7743 IEMOP_HLP_MIN_486();
7744 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7745}
7746
7747
7748/* Opcode 0x0f 0xd0 - invalid */
7749/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7750FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7751/* Opcode 0xf3 0x0f 0xd0 - invalid */
7752/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7753FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7754
7755/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7756FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7757/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
7758FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7759/* Opcode 0xf3 0x0f 0xd1 - invalid */
7760/* Opcode 0xf2 0x0f 0xd1 - invalid */
7761
7762/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7763FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7764/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7765FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7766/* Opcode 0xf3 0x0f 0xd2 - invalid */
7767/* Opcode 0xf2 0x0f 0xd2 - invalid */
7768
7769/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7770FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7771/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7772FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7773/* Opcode 0xf3 0x0f 0xd3 - invalid */
7774/* Opcode 0xf2 0x0f 0xd3 - invalid */
7775
7776/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7777FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7778/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
7779FNIEMOP_STUB(iemOp_paddq_Vx_W);
7780/* Opcode 0xf3 0x0f 0xd4 - invalid */
7781/* Opcode 0xf2 0x0f 0xd4 - invalid */
7782
7783/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7784FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7785/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7786FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7787/* Opcode 0xf3 0x0f 0xd5 - invalid */
7788/* Opcode 0xf2 0x0f 0xd5 - invalid */
7789
7790/* Opcode 0x0f 0xd6 - invalid */
7791
7792/**
7793 * @opcode 0xd6
7794 * @oppfx 0x66
7795 * @opcpuid sse2
7796 * @opgroup og_sse2_pcksclr_datamove
7797 * @opxcpttype none
7798 * @optest op1=-1 op2=2 -> op1=2
7799 * @optest op1=0 op2=-42 -> op1=-42
7800 */
7801FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7802{
7803 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7806 {
7807 /*
7808 * Register, register.
7809 */
7810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7811 IEM_MC_BEGIN(0, 2);
7812 IEM_MC_LOCAL(uint64_t, uSrc);
7813
7814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7815 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7816
7817 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7818 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7819
7820 IEM_MC_ADVANCE_RIP();
7821 IEM_MC_END();
7822 }
7823 else
7824 {
7825 /*
7826 * Memory, register.
7827 */
7828 IEM_MC_BEGIN(0, 2);
7829 IEM_MC_LOCAL(uint64_t, uSrc);
7830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7831
7832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7834 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7836
7837 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7838 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7839
7840 IEM_MC_ADVANCE_RIP();
7841 IEM_MC_END();
7842 }
7843 return VINF_SUCCESS;
7844}
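/* Note: for the register form above the destination register's high quadword
   is zeroed (hence the WqZxReg operand form), while the memory form stores
   only the low 64 bits. */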
7845
7846
7847/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7848FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7849/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7850FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7851#if 0
7852FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7853{
7854 /* Docs say register only. */
7855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7856
7857 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7858 {
7859 case IEM_OP_PRF_SIZE_OP: /* SSE */
7860 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7861 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7862 IEM_MC_BEGIN(2, 0);
7863 IEM_MC_ARG(uint64_t *, pDst, 0);
7864 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7865 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7866 IEM_MC_PREPARE_SSE_USAGE();
7867 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7868 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7869 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7870 IEM_MC_ADVANCE_RIP();
7871 IEM_MC_END();
7872 return VINF_SUCCESS;
7873
7874 case 0: /* MMX */
7875 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7876 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7877 IEM_MC_BEGIN(2, 0);
7878 IEM_MC_ARG(uint64_t *, pDst, 0);
7879 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7880 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7881 IEM_MC_PREPARE_FPU_USAGE();
7882 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7883 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7884 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7885 IEM_MC_ADVANCE_RIP();
7886 IEM_MC_END();
7887 return VINF_SUCCESS;
7888
7889 default:
7890 return IEMOP_RAISE_INVALID_OPCODE();
7891 }
7892}
7893#endif
7894
7895
7896/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7897FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7898{
7899 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7900 /** @todo testcase: Check that the instruction implicitly clears the high
7901 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7902 * and opcode modifications are made to work with the whole width (not
7903 * just 128). */
7904 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7905 /* Docs say register only. */
7906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7907 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7908 {
7909 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7910 IEM_MC_BEGIN(2, 0);
7911 IEM_MC_ARG(uint64_t *, pDst, 0);
7912 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7913 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7914 IEM_MC_PREPARE_FPU_USAGE();
7915 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7916 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7917 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7918 IEM_MC_ADVANCE_RIP();
7919 IEM_MC_END();
7920 return VINF_SUCCESS;
7921 }
7922 return IEMOP_RAISE_INVALID_OPCODE();
7923}
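/*
 * A minimal sketch (disabled code, invented name) of the MMX pmovmskb
 * operation invoked above: the most significant bit of each of the eight
 * source bytes is collected into the low byte of the destination GPR.
 */
#if 0
static void pmovmskbU64Sketch(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uSrc = *puSrc;
    uint64_t fResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fResult |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    *puDst = fResult;
}
#endif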
7924
7925/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
7926FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
7927{
7928 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7929 /** @todo testcase: Check that the instruction implicitly clears the high
7930 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7931 * and opcode modifications are made to work with the whole width (not
7932 * just 128). */
7933 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
7934 /* Docs say register only. */
7935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7936 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7937 {
7938 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7939 IEM_MC_BEGIN(2, 0);
7940 IEM_MC_ARG(uint64_t *, pDst, 0);
7941 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
7942 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7943 IEM_MC_PREPARE_SSE_USAGE();
7944 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7945 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7946 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7947 IEM_MC_ADVANCE_RIP();
7948 IEM_MC_END();
7949 return VINF_SUCCESS;
7950 }
7951 return IEMOP_RAISE_INVALID_OPCODE();
7952}
7953
7954/* Opcode 0xf3 0x0f 0xd7 - invalid */
7955/* Opcode 0xf2 0x0f 0xd7 - invalid */
7956
7957
7958/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7959FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7960/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
7961FNIEMOP_STUB(iemOp_psubusb_Vx_W);
7962/* Opcode 0xf3 0x0f 0xd8 - invalid */
7963/* Opcode 0xf2 0x0f 0xd8 - invalid */
7964
7965/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7966FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7967/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
7968FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
7969/* Opcode 0xf3 0x0f 0xd9 - invalid */
7970/* Opcode 0xf2 0x0f 0xd9 - invalid */
7971
7972/** Opcode 0x0f 0xda - pminub Pq, Qq */
7973FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7974/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
7975FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
7976/* Opcode 0xf3 0x0f 0xda - invalid */
7977/* Opcode 0xf2 0x0f 0xda - invalid */
7978
7979/** Opcode 0x0f 0xdb - pand Pq, Qq */
7980FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7981/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
7982FNIEMOP_STUB(iemOp_pand_Vx_W);
7983/* Opcode 0xf3 0x0f 0xdb - invalid */
7984/* Opcode 0xf2 0x0f 0xdb - invalid */
7985
7986/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7987FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7988/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
7989FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
7990/* Opcode 0xf3 0x0f 0xdc - invalid */
7991/* Opcode 0xf2 0x0f 0xdc - invalid */
7992
7993/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7994FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7995/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
7996FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
7997/* Opcode 0xf3 0x0f 0xdd - invalid */
7998/* Opcode 0xf2 0x0f 0xdd - invalid */
7999
8000/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8001FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8002/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
8003FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8004/* Opcode 0xf3 0x0f 0xde - invalid */
8005/* Opcode 0xf2 0x0f 0xde - invalid */
8006
8007/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8008FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8009/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8010FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8011/* Opcode 0xf3 0x0f 0xdf - invalid */
8012/* Opcode 0xf2 0x0f 0xdf - invalid */
8013
8014/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8015FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8016/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8017FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8018/* Opcode 0xf3 0x0f 0xe0 - invalid */
8019/* Opcode 0xf2 0x0f 0xe0 - invalid */
8020
8021/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8022FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8023/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
8024FNIEMOP_STUB(iemOp_psraw_Vx_W);
8025/* Opcode 0xf3 0x0f 0xe1 - invalid */
8026/* Opcode 0xf2 0x0f 0xe1 - invalid */
8027
8028/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8029FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8030/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8031FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8032/* Opcode 0xf3 0x0f 0xe2 - invalid */
8033/* Opcode 0xf2 0x0f 0xe2 - invalid */
8034
8035/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8036FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8037/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8038FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8039/* Opcode 0xf3 0x0f 0xe3 - invalid */
8040/* Opcode 0xf2 0x0f 0xe3 - invalid */
8041
8042/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8043FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8044/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
8045FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8046/* Opcode 0xf3 0x0f 0xe4 - invalid */
8047/* Opcode 0xf2 0x0f 0xe4 - invalid */
8048
8049/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8050FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8051/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8052FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8053/* Opcode 0xf3 0x0f 0xe5 - invalid */
8054/* Opcode 0xf2 0x0f 0xe5 - invalid */
8055
8056/* Opcode 0x0f 0xe6 - invalid */
8057/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8058FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8059/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8060FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8061/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8062FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8063
8064
8065/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8066FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8067{
8068 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8070 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8071 {
8072 /* Memory, register. */
8073 IEM_MC_BEGIN(0, 2);
8074 IEM_MC_LOCAL(uint64_t, uSrc);
8075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8076
8077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8079 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8080 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8081
8082 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8083 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8084
8085 IEM_MC_ADVANCE_RIP();
8086 IEM_MC_END();
8087 return VINF_SUCCESS;
8088 }
8089 /* The register, register encoding is invalid. */
8090 return IEMOP_RAISE_INVALID_OPCODE();
8091}
8092
8093/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8094FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8095{
8096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8097 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8098 {
8099 /* Memory, register. */
8100 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8101 IEM_MC_BEGIN(0, 2);
8102 IEM_MC_LOCAL(RTUINT128U, uSrc);
8103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8104
8105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8107 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8109
8110 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8111 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8112
8113 IEM_MC_ADVANCE_RIP();
8114 IEM_MC_END();
8115 return VINF_SUCCESS;
8116 }
8117
8118 /* The register, register encoding is invalid. */
8119 return IEMOP_RAISE_INVALID_OPCODE();
8120}
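/* Note: as with movnti and movntq above, the non-temporal hint is not
   modelled; movntdq does however enforce the architectural 16-byte alignment
   requirement via the aligned store helper. */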
8121
8122/* Opcode 0xf3 0x0f 0xe7 - invalid */
8123/* Opcode 0xf2 0x0f 0xe7 - invalid */
8124
8125
8126/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8127FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8128/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
8129FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8130/* Opcode 0xf3 0x0f 0xe8 - invalid */
8131/* Opcode 0xf2 0x0f 0xe8 - invalid */
8132
8133/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8134FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8135/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8136FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8137/* Opcode 0xf3 0x0f 0xe9 - invalid */
8138/* Opcode 0xf2 0x0f 0xe9 - invalid */
8139
8140/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8141FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8142/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8143FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8144/* Opcode 0xf3 0x0f 0xea - invalid */
8145/* Opcode 0xf2 0x0f 0xea - invalid */
8146
8147/** Opcode 0x0f 0xeb - por Pq, Qq */
8148FNIEMOP_STUB(iemOp_por_Pq_Qq);
8149/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
8150FNIEMOP_STUB(iemOp_por_Vx_W);
8151/* Opcode 0xf3 0x0f 0xeb - invalid */
8152/* Opcode 0xf2 0x0f 0xeb - invalid */
8153
8154/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8155FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8156/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8157FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8158/* Opcode 0xf3 0x0f 0xec - invalid */
8159/* Opcode 0xf2 0x0f 0xec - invalid */
8160
8161/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8162FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8163/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8164FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8165/* Opcode 0xf3 0x0f 0xed - invalid */
8166/* Opcode 0xf2 0x0f 0xed - invalid */
8167
8168/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8169FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8170/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
8171FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8172/* Opcode 0xf3 0x0f 0xee - invalid */
8173/* Opcode 0xf2 0x0f 0xee - invalid */
8174
8175
8176/** Opcode 0x0f 0xef - pxor Pq, Qq */
8177FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8178{
8179 IEMOP_MNEMONIC(pxor, "pxor");
8180 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8181}
8182
8183/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8184FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8185{
8186 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8187 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8188}
8189
8190/* Opcode 0xf3 0x0f 0xef - invalid */
8191/* Opcode 0xf2 0x0f 0xef - invalid */
8192
8193/* Opcode 0x0f 0xf0 - invalid */
8194/* Opcode 0x66 0x0f 0xf0 - invalid */
8195/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8196FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8197
8198/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8199FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8200/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
8201FNIEMOP_STUB(iemOp_psllw_Vx_W);
8202/* Opcode 0xf2 0x0f 0xf1 - invalid */
8203
8204/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8205FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8206/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8207FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8208/* Opcode 0xf2 0x0f 0xf2 - invalid */
8209
8210/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8211FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8212/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8213FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8214/* Opcode 0xf2 0x0f 0xf3 - invalid */
8215
8216/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8217FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8218/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
8219FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8220/* Opcode 0xf2 0x0f 0xf4 - invalid */
8221
8222/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8223FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8224/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8225FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8226/* Opcode 0xf2 0x0f 0xf5 - invalid */
8227
8228/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8229FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8230/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8231FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8232/* Opcode 0xf2 0x0f 0xf6 - invalid */
8233
8234/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8235FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8236/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8237FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8238/* Opcode 0xf2 0x0f 0xf7 - invalid */
8239
8240/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8241FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8242/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
8243FNIEMOP_STUB(iemOp_psubb_Vx_W);
8244/* Opcode 0xf2 0x0f 0xf8 - invalid */
8245
8246/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8247FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8248/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8249FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8250/* Opcode 0xf2 0x0f 0xf9 - invalid */
8251
8252/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8253FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8254/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8255FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8256/* Opcode 0xf2 0x0f 0xfa - invalid */
8257
8258/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8259FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8260/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
8261FNIEMOP_STUB(iemOp_psubq_Vx_W);
8262/* Opcode 0xf2 0x0f 0xfb - invalid */
8263
8264/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8265FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8266/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8267FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8268/* Opcode 0xf2 0x0f 0xfc - invalid */
8269
8270/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8271FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8272/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8273FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8274/* Opcode 0xf2 0x0f 0xfd - invalid */
8275
8276/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8277FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8278/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
8279FNIEMOP_STUB(iemOp_paddd_Vx_W);
8280/* Opcode 0xf2 0x0f 0xfe - invalid */
8281
8282
8283/** Opcode **** 0x0f 0xff - UD0 */
8284FNIEMOP_DEF(iemOp_ud0)
8285{
8286 IEMOP_MNEMONIC(ud0, "ud0");
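    /* Intel CPUs consume a ModR/M byte (and any addressing bytes) for ud0,
       which is what the extra decoding below mimics; other vendors do not
       appear to. */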
8287 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8288 {
8289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8290#ifndef TST_IEM_CHECK_MC
8291 RTGCPTR GCPtrEff;
8292 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8293 if (rcStrict != VINF_SUCCESS)
8294 return rcStrict;
8295#endif
8296 IEMOP_HLP_DONE_DECODING();
8297 }
8298 return IEMOP_RAISE_INVALID_OPCODE();
8299}
8300
8301
8302
8303/**
8304 * Two byte opcode map, first byte 0x0f.
8305 *
8306 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8307 * check if it needs updating as well when making changes.
8308 */
8309IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8310{
8311 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8312 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8313 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8314 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8315 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8316 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8317 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8318 /* 0x06 */ IEMOP_X4(iemOp_clts),
8319 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8320 /* 0x08 */ IEMOP_X4(iemOp_invd),
8321 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8322 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8323 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8324 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8325 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8326 /* 0x0e */ IEMOP_X4(iemOp_femms),
8327 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8328
8329 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd,
8330 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8331 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8332 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8333 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8334 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8335 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
8336 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8337 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8338 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8339 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8340 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8341 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8342 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8343 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8344 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8345
8346 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8347 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8348 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8349 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8350 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8351 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8352 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8353 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8354 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8355 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8356 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
8357 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8358 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
8359 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
8360 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8361 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8362
8363 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8364 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8365 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8366 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8367 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8368 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8369 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8370 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8371 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
8372 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8373 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
8374 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8375 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8376 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8377 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8378 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8379
8380 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8381 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8382 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8383 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8384 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8385 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8386 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8387 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8388 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8389 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8390 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8391 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8392 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8393 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8394 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8395 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8396
8397 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8398 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
8399 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
8400 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
8401 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8402 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8403 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8404 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8405 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
8406 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
8407 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
8408 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8409 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
8410 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
8411 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
8412 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
8413
8414 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8415 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8416 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8417 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8418 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8419 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8420 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8421 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8422 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8423 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8424 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8425 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8426 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8427 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8428 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8429 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
8430
8431 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
8432 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8433 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8434 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8435 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8436 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8437 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8438 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8439
8440 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8441 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8442 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8443 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8444 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
8445 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
8446 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
8447 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
8448
8449 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8450 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8451 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8452 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8453 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8454 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8455 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8456 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8457 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8458 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8459 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8460 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8461 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8462 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8463 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8464 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8465
8466 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8467 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8468 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8469 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8470 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8471 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8472 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8473 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8474 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8475 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8476 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8477 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8478 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8479 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8480 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8481 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8482
8483 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8484 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8485 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8486 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8487 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8488 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8489 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8490 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8491 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8492 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8493 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8494 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8495 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8496 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8497 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8498 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8499
8500 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8501 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8502 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8503 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8504 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8505 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8506 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8507 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8508 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8509 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8510 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8511 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8512 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8513 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8514 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8515 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8516
8517 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8518 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8519 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
8520 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8521 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8522 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8523 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8524 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8525 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8526 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8527 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8528 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8529 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8530 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8531 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8532 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8533
8534 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
8535 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8536 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8537 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8538 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8539 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8540 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8541 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8542 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8543 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8544 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8545 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8546 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8547 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8548 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8549 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8550
8551 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8552 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8553 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8554 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8555 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8556 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8557 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
8558 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8559 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8560 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8561 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8562 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8563 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8564 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8565 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8566 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8567
8568 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
8569 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8570 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8571 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8572 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8573 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8574 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8575 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8576 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8577 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8578 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8579 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8580 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8581 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8582 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8583 /* 0xff */ IEMOP_X4(iemOp_ud0),
8584};
8585AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
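/** @remarks Each opcode row in g_apfnTwoByteMap above carries four handler
 * columns, one per mandatory-prefix variant: no prefix, 0x66, 0xF3 and
 * 0xF2 (compare the 0x6f row: movq, movdqa, movdqu). IEMOP_X4(a_Name) is
 * a convenience macro expanding to four copies of a_Name, used where the
 * prefix does not select a different instruction (jcc, setcc, bswap, ...);
 * this is how the 256 opcode rows yield the 1024 entries asserted above.
 * Rows such as 0xbc/0xbd show the mixed case: the 0xF3 column dispatches
 * to tzcnt/lzcnt while the remaining columns fall back to bsf/bsr. */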
8586
8587/** @} */
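For readers new to this table, a minimal standalone sketch of the lookup scheme follows. It is not part of the VirtualBox sources: the names (g_apfnToyMap, opXorps, opXorpd, idxPrefix) are illustrative assumptions. The real decoder dispatches through g_apfnTwoByteMap using the mandatory-prefix index it recorded while decoding; here that scheme is reduced to a toy table so the opcode*4 + prefix arithmetic can be seen in isolation.

/* Toy model of the 256 x 4 dispatch used by the two-byte opcode map.
 * Column index: 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
#include <stdio.h>

typedef void (*PFNOPHANDLER)(void);

static void opXorps(void)   { printf("xorps\n"); }
static void opXorpd(void)   { printf("xorpd\n"); }
static void opInvalid(void) { printf("#UD\n"); }

/* Four consecutive slots per opcode byte, as in g_apfnTwoByteMap. */
static PFNOPHANDLER g_apfnToyMap[256 * 4];

int main(void)
{
    for (unsigned i = 0; i < 256 * 4; i++)
        g_apfnToyMap[i] = opInvalid;
    g_apfnToyMap[0x57 * 4 + 0] = opXorps;   /*    0F 57 -> xorps */
    g_apfnToyMap[0x57 * 4 + 1] = opXorpd;   /* 66 0F 57 -> xorpd */

    unsigned const bOpcode = 0x57, idxPrefix = 1; /* a 0x66 prefix was seen */
    g_apfnToyMap[bOpcode * 4 + idxPrefix]();      /* prints "xorpd" */
    return 0;
}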
8588