VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 66747

Last change on this file since 66747 was 66747, checked in by vboxsync, 8 years ago

IEM: Implemented movlps Mq,Vq (0x0f 0x13).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision

File size: 311.1 KB

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 66747 2017-05-02 14:00:02Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

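/* Note on decoding: throughout this file the ModR/M byte (bRm) is taken apart
   by hand.  Bits 7:6 (mod) select the addressing form -- mod == 11b means the
   r/m field names a register, anything else a memory operand.  Bits 5:3 (reg)
   carry the /digit opcode extension for group instructions, and bits 2:0 (r/m)
   select the register or base addressing mode; REX.R and REX.B extend the reg
   and r/m fields to four bits in 64-bit mode. */
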
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
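                /* Descriptive note: old target CPUs return 1s in the high,
                   undefined MSW bits -- on a 386 only bit 4 (ET) also comes
                   from CR0 (hence OR 0xffe0), a 286 forces bits 4..15 to 1
                   (OR 0xfff0).  Later CPUs return the CR0 low word as-is. */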
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; the source is always 16-bit
       and only the low four bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

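    /* With mod == 11b, group 7 re-purposes the r/m field to select further
       instructions: the VMX group, monitor/mwait, xgetbv/xsetbv, the AMD SVM
       group, and swapgs/rdtscp. */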
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}

// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
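/* A note on the @optest lines used from here on: "op1=1 op2=2 -> op1=2" is
   read by the instruction test generator as: preload the operands with the
   values on the left, execute the instruction, assert the state on the
   right. */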
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZxReg, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
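    /* 0x0f 0x12 decodes as movhlps for register operands and as movlps for
       memory operands; the doc comments below describe the two forms. */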
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opdone
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, MqWO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x66 0x0f 0x13 - movlpd Mq, Vq */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
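        /* The register form of 66 0F 13 raises #UD; the disabled block above
           sketches what a reg,reg move would look like were it valid. */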
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq movlhps Vdq, Uq */
FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
1881    /* The mod field is ignored, as are operand size overrides. */
1882 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1883 IEMOP_HLP_MIN_386();
1884 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1885 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1886 else
1887 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1888
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1891 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1892 {
1893 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1894 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1895 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1896 iCrReg |= 8;
1897 }
1898 switch (iCrReg)
1899 {
1900 case 0: case 2: case 3: case 4: case 8:
1901 break;
1902 default:
1903 return IEMOP_RAISE_INVALID_OPCODE();
1904 }
1905 IEMOP_HLP_DONE_DECODING();
1906
1907 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1908}
1909
1910
1911/** Opcode 0x0f 0x23. */
1912FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1913{
1914 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1915 IEMOP_HLP_MIN_386();
1916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1918 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1919 return IEMOP_RAISE_INVALID_OPCODE();
1920 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1921 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1922 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1923}
1924
1925
1926/** Opcode 0x0f 0x24. */
1927FNIEMOP_DEF(iemOp_mov_Rd_Td)
1928{
1929 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1930 /** @todo works on 386 and 486. */
1931 /* The RM byte is not considered, see testcase. */
1932 return IEMOP_RAISE_INVALID_OPCODE();
1933}
1934
1935
1936/** Opcode 0x0f 0x26. */
1937FNIEMOP_DEF(iemOp_mov_Td_Rd)
1938{
1939 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1940 /** @todo works on 386 and 486. */
1941 /* The RM byte is not considered, see testcase. */
1942 return IEMOP_RAISE_INVALID_OPCODE();
1943}
1944
1945
1946/** Opcode 0x0f 0x28 - movaps Vps, Wps */
1947FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
1948{
1949    IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
1950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1952 {
1953 /*
1954 * Register, register.
1955 */
1956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1957 IEM_MC_BEGIN(0, 0);
1958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1959 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1960 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1961 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1962 IEM_MC_ADVANCE_RIP();
1963 IEM_MC_END();
1964 }
1965 else
1966 {
1967 /*
1968 * Register, memory.
1969 */
1970 IEM_MC_BEGIN(0, 2);
1971 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1973
1974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1976 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1978
1979 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1980 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1981
1982 IEM_MC_ADVANCE_RIP();
1983 IEM_MC_END();
1984 }
1985 return VINF_SUCCESS;
1986}
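
/*
 * Note: the _ALIGN_SSE fetch above is what gives movaps its architectural
 * alignment rule; a 16-byte memory operand that is not 16-byte aligned
 * raises #GP(0) instead of being loaded. Contrast this with the plain
 * unaligned fetch used for movdqu further down.
 */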
1987
1988/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
1989FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
1990{
1991    IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
1992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1993 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1994 {
1995 /*
1996 * Register, register.
1997 */
1998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1999 IEM_MC_BEGIN(0, 0);
2000 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2001 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2002 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2003 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2004 IEM_MC_ADVANCE_RIP();
2005 IEM_MC_END();
2006 }
2007 else
2008 {
2009 /*
2010 * Register, memory.
2011 */
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2020
2021 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2022 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 }
2027 return VINF_SUCCESS;
2028}
2029
2030/* Opcode 0xf3 0x0f 0x28 - invalid */
2031/* Opcode 0xf2 0x0f 0x28 - invalid */
2032
2033/** Opcode 0x0f 0x29 - movaps Wps, Vps */
2034FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2035{
2036 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2039 {
2040 /*
2041 * Register, register.
2042 */
2043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2044 IEM_MC_BEGIN(0, 0);
2045 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2046 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2047 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2048 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2049 IEM_MC_ADVANCE_RIP();
2050 IEM_MC_END();
2051 }
2052 else
2053 {
2054 /*
2055 * Memory, register.
2056 */
2057 IEM_MC_BEGIN(0, 2);
2058 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2060
2061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2063 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2065
2066 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2067 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2068
2069 IEM_MC_ADVANCE_RIP();
2070 IEM_MC_END();
2071 }
2072 return VINF_SUCCESS;
2073}
2074
2075/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
2076FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2077{
2078 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2081 {
2082 /*
2083 * Register, register.
2084 */
2085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2086 IEM_MC_BEGIN(0, 0);
2087 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2088 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2089 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2090 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2091 IEM_MC_ADVANCE_RIP();
2092 IEM_MC_END();
2093 }
2094 else
2095 {
2096 /*
2097 * Memory, register.
2098 */
2099 IEM_MC_BEGIN(0, 2);
2100 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2102
2103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2105 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2106 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2107
2108 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2109 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2110
2111 IEM_MC_ADVANCE_RIP();
2112 IEM_MC_END();
2113 }
2114 return VINF_SUCCESS;
2115}
2116
2117/* Opcode 0xf3 0x0f 0x29 - invalid */
2118/* Opcode 0xf2 0x0f 0x29 - invalid */
2119
2120
2121/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2122FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2123/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2124FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2125/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2126FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2127/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2128FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2129
2130
2131/** Opcode 0x0f 0x2b - movntps Mps, Vps */
2132FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2133{
2134 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2136 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2137 {
2138 /*
2139         * Memory, register.
2140 */
2141 IEM_MC_BEGIN(0, 2);
2142 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2144
2145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2147 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2148 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2149
2150 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2151 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2152
2153 IEM_MC_ADVANCE_RIP();
2154 IEM_MC_END();
2155 }
2156 /* The register, register encoding is invalid. */
2157 else
2158 return IEMOP_RAISE_INVALID_OPCODE();
2159 return VINF_SUCCESS;
2160}
2161
2162/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
2163FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2164{
2165    IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2167 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2168 {
2169 /*
2170         * Memory, register.
2171 */
2172 IEM_MC_BEGIN(0, 2);
2173 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2175
2176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2178 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2179 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2180
2181 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2182 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2183
2184 IEM_MC_ADVANCE_RIP();
2185 IEM_MC_END();
2186 }
2187 /* The register, register encoding is invalid. */
2188 else
2189 return IEMOP_RAISE_INVALID_OPCODE();
2190 return VINF_SUCCESS;
2191}
2192/* Opcode 0xf3 0x0f 0x2b - invalid */
2193/* Opcode 0xf2 0x0f 0x2b - invalid */
2194
2195
2196/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2197FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2198/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2199FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2200/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2201FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2202/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2203FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2204
2205/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2206FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2207/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2208FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2209/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2210FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2211/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2212FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2213
2214/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2215FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2216/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2217FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2218/* Opcode 0xf3 0x0f 0x2e - invalid */
2219/* Opcode 0xf2 0x0f 0x2e - invalid */
2220
2221/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2222FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2223/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2224FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2225/* Opcode 0xf3 0x0f 0x2f - invalid */
2226/* Opcode 0xf2 0x0f 0x2f - invalid */
2227
2228/** Opcode 0x0f 0x30. */
2229FNIEMOP_DEF(iemOp_wrmsr)
2230{
2231 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2233 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2234}
2235
2236
2237/** Opcode 0x0f 0x31. */
2238FNIEMOP_DEF(iemOp_rdtsc)
2239{
2240 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2242 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2243}
2244
2245
2246/** Opcode 0x0f 0x32. */
2247FNIEMOP_DEF(iemOp_rdmsr)
2248{
2249 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2251 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2252}
2253
2254
2255/** Opcode 0x0f 0x33. */
2256FNIEMOP_DEF(iemOp_rdpmc)
2257{
2258 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2260 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2261}
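
/*
 * For reference: rdmsr loads the MSR selected by ECX into EDX:EAX, wrmsr
 * stores EDX:EAX into that MSR, and rdpmc reads the performance counter
 * selected by ECX. The privilege checks and #GP details live in the
 * deferred iemCImpl_* workers, not in these decoder wrappers.
 */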
2262
2263
2264/** Opcode 0x0f 0x34. */
2265FNIEMOP_STUB(iemOp_sysenter);
2266/** Opcode 0x0f 0x35. */
2267FNIEMOP_STUB(iemOp_sysexit);
2268/** Opcode 0x0f 0x37. */
2269FNIEMOP_STUB(iemOp_getsec);
2270
2271
2272/** Opcode 0x0f 0x38. */
2273FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2274{
2275#ifdef IEM_WITH_THREE_0F_38
2276 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2277 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2278#else
2279 IEMOP_BITCH_ABOUT_STUB();
2280 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2281#endif
2282}
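
/*
 * Sketch of the lookup above, assuming the usual IEM table layout of four
 * entries per opcode byte in prefix order none/0x66/0xF3/0xF2 (idxPrefix
 * 0..3):
 *
 *     pfn = g_apfnThreeByte0f38[(uintptr_t)b * 4 + idxPrefix];
 *
 * so 66 0F 38 00 xx would land on the 0x66 column of entry 0x00.
 */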
2283
2284
2285/** Opcode 0x0f 0x3a. */
2286FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2287{
2288#ifdef IEM_WITH_THREE_0F_3A
2289 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2290 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2291#else
2292 IEMOP_BITCH_ABOUT_STUB();
2293 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2294#endif
2295}
2296
2297
2298/**
2299 * Implements a conditional move.
2300 *
2301 * Wish there was an obvious way to do this where we could share and reduce
2302 * Wish there were an obvious way to do this that would let us share code
2303 * and reduce bloat.
2304 * @param a_Cnd The conditional "microcode" operation.
2305 */
2306#define CMOV_X(a_Cnd) \
2307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2308 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2309 { \
2310 switch (pVCpu->iem.s.enmEffOpSize) \
2311 { \
2312 case IEMMODE_16BIT: \
2313 IEM_MC_BEGIN(0, 1); \
2314 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2315 a_Cnd { \
2316 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2317 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2318 } IEM_MC_ENDIF(); \
2319 IEM_MC_ADVANCE_RIP(); \
2320 IEM_MC_END(); \
2321 return VINF_SUCCESS; \
2322 \
2323 case IEMMODE_32BIT: \
2324 IEM_MC_BEGIN(0, 1); \
2325 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2326 a_Cnd { \
2327 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2328 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2329 } IEM_MC_ELSE() { \
2330 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2331 } IEM_MC_ENDIF(); \
2332 IEM_MC_ADVANCE_RIP(); \
2333 IEM_MC_END(); \
2334 return VINF_SUCCESS; \
2335 \
2336 case IEMMODE_64BIT: \
2337 IEM_MC_BEGIN(0, 1); \
2338 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2339 a_Cnd { \
2340 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2341 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2342 } IEM_MC_ENDIF(); \
2343 IEM_MC_ADVANCE_RIP(); \
2344 IEM_MC_END(); \
2345 return VINF_SUCCESS; \
2346 \
2347 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2348 } \
2349 } \
2350 else \
2351 { \
2352 switch (pVCpu->iem.s.enmEffOpSize) \
2353 { \
2354 case IEMMODE_16BIT: \
2355 IEM_MC_BEGIN(0, 2); \
2356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2357 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2359 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2360 a_Cnd { \
2361 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2362 } IEM_MC_ENDIF(); \
2363 IEM_MC_ADVANCE_RIP(); \
2364 IEM_MC_END(); \
2365 return VINF_SUCCESS; \
2366 \
2367 case IEMMODE_32BIT: \
2368 IEM_MC_BEGIN(0, 2); \
2369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2370 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2372 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2373 a_Cnd { \
2374 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2375 } IEM_MC_ELSE() { \
2376 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2377 } IEM_MC_ENDIF(); \
2378 IEM_MC_ADVANCE_RIP(); \
2379 IEM_MC_END(); \
2380 return VINF_SUCCESS; \
2381 \
2382 case IEMMODE_64BIT: \
2383 IEM_MC_BEGIN(0, 2); \
2384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2385 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2387 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2388 a_Cnd { \
2389 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2390 } IEM_MC_ENDIF(); \
2391 IEM_MC_ADVANCE_RIP(); \
2392 IEM_MC_END(); \
2393 return VINF_SUCCESS; \
2394 \
2395 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2396 } \
2397 } do {} while (0)
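
/*
 * Note on the asymmetry above: with a 32-bit operand in 64-bit mode, CMOV
 * architecturally zero-extends the destination even when the condition is
 * false, hence the IEM_MC_ELSE() branches clearing the high half in the
 * 32-bit cases only. The 16-bit and 64-bit cases leave the destination
 * untouched on a false condition and thus need no else branch.
 */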
2398
2399
2400
2401/** Opcode 0x0f 0x40. */
2402FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2403{
2404 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2405 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2406}
2407
2408
2409/** Opcode 0x0f 0x41. */
2410FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2411{
2412 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2413 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2414}
2415
2416
2417/** Opcode 0x0f 0x42. */
2418FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2419{
2420 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2421 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2422}
2423
2424
2425/** Opcode 0x0f 0x43. */
2426FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2427{
2428 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2429 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2430}
2431
2432
2433/** Opcode 0x0f 0x44. */
2434FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2435{
2436 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2437 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2438}
2439
2440
2441/** Opcode 0x0f 0x45. */
2442FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2443{
2444 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2445 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2446}
2447
2448
2449/** Opcode 0x0f 0x46. */
2450FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2451{
2452 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2453 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2454}
2455
2456
2457/** Opcode 0x0f 0x47. */
2458FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2459{
2460 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2461 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2462}
2463
2464
2465/** Opcode 0x0f 0x48. */
2466FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2467{
2468 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2469 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2470}
2471
2472
2473/** Opcode 0x0f 0x49. */
2474FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2475{
2476 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2477 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2478}
2479
2480
2481/** Opcode 0x0f 0x4a. */
2482FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2483{
2484 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2485 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2486}
2487
2488
2489/** Opcode 0x0f 0x4b. */
2490FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2491{
2492 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2493 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2494}
2495
2496
2497/** Opcode 0x0f 0x4c. */
2498FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2499{
2500 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2501 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2502}
2503
2504
2505/** Opcode 0x0f 0x4d. */
2506FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2507{
2508 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2509 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2510}
2511
2512
2513/** Opcode 0x0f 0x4e. */
2514FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2515{
2516 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2517 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2518}
2519
2520
2521/** Opcode 0x0f 0x4f. */
2522FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2523{
2524 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2525 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2526}
2527
2528#undef CMOV_X
2529
2530/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2531FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2532/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2533FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2534/* Opcode 0xf3 0x0f 0x50 - invalid */
2535/* Opcode 0xf2 0x0f 0x50 - invalid */
2536
2537/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2538FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2539/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2540FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2541/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2542FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2543/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2544FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2545
2546/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2547FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2548/* Opcode 0x66 0x0f 0x52 - invalid */
2549/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2550FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2551/* Opcode 0xf2 0x0f 0x52 - invalid */
2552
2553/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2554FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2555/* Opcode 0x66 0x0f 0x53 - invalid */
2556/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2557FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2558/* Opcode 0xf2 0x0f 0x53 - invalid */
2559
2560/** Opcode 0x0f 0x54 - andps Vps, Wps */
2561FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2562/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2563FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2564/* Opcode 0xf3 0x0f 0x54 - invalid */
2565/* Opcode 0xf2 0x0f 0x54 - invalid */
2566
2567/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2568FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2569/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2570FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2571/* Opcode 0xf3 0x0f 0x55 - invalid */
2572/* Opcode 0xf2 0x0f 0x55 - invalid */
2573
2574/** Opcode 0x0f 0x56 - orps Vps, Wps */
2575FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2576/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2577FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2578/* Opcode 0xf3 0x0f 0x56 - invalid */
2579/* Opcode 0xf2 0x0f 0x56 - invalid */
2580
2581/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2582FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2583/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2584FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2585/* Opcode 0xf3 0x0f 0x57 - invalid */
2586/* Opcode 0xf2 0x0f 0x57 - invalid */
2587
2588/** Opcode 0x0f 0x58 - addps Vps, Wps */
2589FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2590/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2591FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2592/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2593FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2594/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2595FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2596
2597/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2598FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2599/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2600FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2601/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2602FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2603/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2604FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2605
2606/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
2607FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
2608/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
2609FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
2610/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
2611FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
2612/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
2613FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
2614
2615/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
2616FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
2617/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
2618FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
2619/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
2620FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
2621/* Opcode 0xf2 0x0f 0x5b - invalid */
2622
2623/** Opcode 0x0f 0x5c - subps Vps, Wps */
2624FNIEMOP_STUB(iemOp_subps_Vps_Wps);
2625/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
2626FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
2627/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
2628FNIEMOP_STUB(iemOp_subss_Vss_Wss);
2629/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
2630FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
2631
2632/** Opcode 0x0f 0x5d - minps Vps, Wps */
2633FNIEMOP_STUB(iemOp_minps_Vps_Wps);
2634/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
2635FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
2636/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
2637FNIEMOP_STUB(iemOp_minss_Vss_Wss);
2638/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
2639FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
2640
2641/** Opcode 0x0f 0x5e - divps Vps, Wps */
2642FNIEMOP_STUB(iemOp_divps_Vps_Wps);
2643/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
2644FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
2645/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
2646FNIEMOP_STUB(iemOp_divss_Vss_Wss);
2647/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
2648FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
2649
2650/** Opcode 0x0f 0x5f - maxps Vps, Wps */
2651FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
2652/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
2653FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
2654/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
2655FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
2656/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
2657FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
2658
2659/**
2660 * Common worker for SSE2 instructions on the form:
2661 *      pxxxx    xmm1, xmm2/mem128
2662 *
2663 * The 2nd operand is the first half of a register, which in the memory case
2664 * means a 128-bit aligned 64-bit or 128-bit memory access of which only the
2665 * low 64 bits are significant.
2666 *
2667 * Exceptions type 4.
2668 */
2669FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2670{
2671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2672 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2673 {
2674 /*
2675 * Register, register.
2676 */
2677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2678 IEM_MC_BEGIN(2, 0);
2679 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2680 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2681 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2682 IEM_MC_PREPARE_SSE_USAGE();
2683 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2684 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2685 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2686 IEM_MC_ADVANCE_RIP();
2687 IEM_MC_END();
2688 }
2689 else
2690 {
2691 /*
2692 * Register, memory.
2693 */
2694 IEM_MC_BEGIN(2, 2);
2695 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2696 IEM_MC_LOCAL(uint64_t, uSrc);
2697 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2699
2700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2703 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2704
2705 IEM_MC_PREPARE_SSE_USAGE();
2706 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2707 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2708
2709 IEM_MC_ADVANCE_RIP();
2710 IEM_MC_END();
2711 }
2712 return VINF_SUCCESS;
2713}
2714
2715
2716/**
2717 * Common worker for MMX instructions on the form:
2718 *      pxxxx    mm1, mm2/mem32
2719 *
2720 * The 2nd operand is the first half of a register, which in the memory case
2721 * means a 32-bit memory access, and in the register case the low 32 bits of
2722 * the source MMX register.
2723 *
2724 * Exceptions type 4.
2725 */
2726FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2727{
2728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2729 if (!pImpl->pfnU64)
2730 return IEMOP_RAISE_INVALID_OPCODE();
2731 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2732 {
2733 /*
2734 * Register, register.
2735 */
2736 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2737 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2739 IEM_MC_BEGIN(2, 0);
2740 IEM_MC_ARG(uint64_t *, pDst, 0);
2741 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2742 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2743 IEM_MC_PREPARE_FPU_USAGE();
2744 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2745 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2746 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2747 IEM_MC_ADVANCE_RIP();
2748 IEM_MC_END();
2749 }
2750 else
2751 {
2752 /*
2753 * Register, memory.
2754 */
2755 IEM_MC_BEGIN(2, 2);
2756 IEM_MC_ARG(uint64_t *, pDst, 0);
2757 IEM_MC_LOCAL(uint32_t, uSrc);
2758 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2760
2761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2763 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2764 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2765
2766 IEM_MC_PREPARE_FPU_USAGE();
2767 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2768 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2769
2770 IEM_MC_ADVANCE_RIP();
2771 IEM_MC_END();
2772 }
2773 return VINF_SUCCESS;
2774}
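
/*
 * Illustrative data flow for the LowLow workers, using punpcklbw: with
 * dst = {d0..d7} and src = {s0..s7} (bytes, lowest element first) the
 * result is {d0,s0,d1,s1,d2,s2,d3,s3}. Only the low half of each input is
 * consumed, which is why the MMX memory form needs just a 32-bit fetch.
 */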
2775
2776
2777/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2778FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2779{
2780 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2781 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2782}
2783
2784/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
2785FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
2786{
2787    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
2788 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2789}
2790
2791/* Opcode 0xf3 0x0f 0x60 - invalid */
2792
2793
2794/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2795FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2796{
2797 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2798 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2799}
2800
2801/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
2802FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
2803{
2804    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
2805 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2806}
2807
2808/* Opcode 0xf3 0x0f 0x61 - invalid */
2809
2810
2811/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2812FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2813{
2814 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2815 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2816}
2817
2818/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
2819FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
2820{
2821 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
2822 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2823}
2824
2825/* Opcode 0xf3 0x0f 0x62 - invalid */
2826
2827
2828
2829/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2830FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2831/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
2832FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
2833/* Opcode 0xf3 0x0f 0x63 - invalid */
2834
2835/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2836FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2837/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
2838FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
2839/* Opcode 0xf3 0x0f 0x64 - invalid */
2840
2841/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2842FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2843/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
2844FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
2845/* Opcode 0xf3 0x0f 0x65 - invalid */
2846
2847/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2848FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2849/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
2850FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
2851/* Opcode 0xf3 0x0f 0x66 - invalid */
2852
2853/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2854FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2855/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
2856FNIEMOP_STUB(iemOp_packuswb_Vx_W);
2857/* Opcode 0xf3 0x0f 0x67 - invalid */
2858
2859
2860/**
2861 * Common worker for MMX instructions on the form:
2862 * pxxxx mm1, mm2/mem64
2863 *
2864 * The 2nd operand is the second half of a register, which in the memory case
2865 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2866 * where it may read the full 128 bits or only the upper 64 bits.
2867 *
2868 * Exceptions type 4.
2869 */
2870FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2871{
2872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2873 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2874 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2875 {
2876 /*
2877 * Register, register.
2878 */
2879 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2880 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2882 IEM_MC_BEGIN(2, 0);
2883 IEM_MC_ARG(uint64_t *, pDst, 0);
2884 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2885 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2886 IEM_MC_PREPARE_FPU_USAGE();
2887 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2888 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2889 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2890 IEM_MC_ADVANCE_RIP();
2891 IEM_MC_END();
2892 }
2893 else
2894 {
2895 /*
2896 * Register, memory.
2897 */
2898 IEM_MC_BEGIN(2, 2);
2899 IEM_MC_ARG(uint64_t *, pDst, 0);
2900 IEM_MC_LOCAL(uint64_t, uSrc);
2901 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2903
2904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2906 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2907 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2908
2909 IEM_MC_PREPARE_FPU_USAGE();
2910 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2911 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2912
2913 IEM_MC_ADVANCE_RIP();
2914 IEM_MC_END();
2915 }
2916 return VINF_SUCCESS;
2917}
2918
2919
2920/**
2921 * Common worker for SSE2 instructions on the form:
2922 * pxxxx xmm1, xmm2/mem128
2923 *
2924 * The 2nd operand is the second half of a register, which in the memory case
2925 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2926 * where it may read the full 128 bits or only the upper 64 bits.
2927 *
2928 * Exceptions type 4.
2929 */
2930FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2931{
2932 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2933 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2934 {
2935 /*
2936 * Register, register.
2937 */
2938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2939 IEM_MC_BEGIN(2, 0);
2940 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2941 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2942 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2943 IEM_MC_PREPARE_SSE_USAGE();
2944 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2945 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2946 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2947 IEM_MC_ADVANCE_RIP();
2948 IEM_MC_END();
2949 }
2950 else
2951 {
2952 /*
2953 * Register, memory.
2954 */
2955 IEM_MC_BEGIN(2, 2);
2956 IEM_MC_ARG(PRTUINT128U, pDst, 0);
2957 IEM_MC_LOCAL(RTUINT128U, uSrc);
2958 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2960
2961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2964        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2965
2966 IEM_MC_PREPARE_SSE_USAGE();
2967 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2968 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2969
2970 IEM_MC_ADVANCE_RIP();
2971 IEM_MC_END();
2972 }
2973 return VINF_SUCCESS;
2974}
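
/*
 * Illustrative data flow for the HighHigh workers, using punpckhbw: with
 * 64-bit dst = {d0..d7} and src = {s0..s7} the result is
 * {d4,s4,d5,s5,d6,s6,d7,s7}. Only the upper halves matter, which is why
 * real CPUs probably read just the high qword of the SSE memory operand.
 */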
2975
2976
2977/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2978FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2979{
2980 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2981 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2982}
2983
2984/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
2985FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
2986{
2987    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
2988 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2989}
2990/* Opcode 0xf3 0x0f 0x68 - invalid */
2991
2992
2993/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2994FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2995{
2996 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2997 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2998}
2999
3000/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3001FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3002{
3003 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3004 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3005
3006}
3007/* Opcode 0xf3 0x0f 0x69 - invalid */
3008
3009
3010/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3011FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3012{
3013 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3014 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3015}
3016
3017/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3018FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3019{
3020 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3021 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3022}
3023/* Opcode 0xf3 0x0f 0x6a - invalid */
3024
3025
3026/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3027FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3028/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3029FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3030/* Opcode 0xf3 0x0f 0x6b - invalid */
3031
3032
3033/* Opcode 0x0f 0x6c - invalid */
3034
3035/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3036FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3037{
3038 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3039 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3040}
3041
3042/* Opcode 0xf3 0x0f 0x6c - invalid */
3043/* Opcode 0xf2 0x0f 0x6c - invalid */
3044
3045
3046/* Opcode 0x0f 0x6d - invalid */
3047
3048/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3049FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3050{
3051 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3052 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3053}
3054
3055/* Opcode 0xf3 0x0f 0x6d - invalid */
3056
3057
3058/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3059FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3060{
3061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3062 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3063 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3064 else
3065 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3067 {
3068 /* MMX, greg */
3069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3070 IEM_MC_BEGIN(0, 1);
3071 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3072 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3073 IEM_MC_LOCAL(uint64_t, u64Tmp);
3074 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3075 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3076 else
3077 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3078 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3079 IEM_MC_ADVANCE_RIP();
3080 IEM_MC_END();
3081 }
3082 else
3083 {
3084 /* MMX, [mem] */
3085 IEM_MC_BEGIN(0, 2);
3086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3087 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3088        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate */
3089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3090 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3092 {
3093 IEM_MC_LOCAL(uint64_t, u64Tmp);
3094 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3095 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3096 }
3097 else
3098 {
3099 IEM_MC_LOCAL(uint32_t, u32Tmp);
3100 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3101 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3102 }
3103 IEM_MC_ADVANCE_RIP();
3104 IEM_MC_END();
3105 }
3106 return VINF_SUCCESS;
3107}
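
/*
 * Encoding example (illustrative): 0F 6E C0 is movd mm0,eax, while the
 * REX.W form 48 0F 6E C0 is movq mm0,rax; the same opcode, with REX.W
 * selecting the 64-bit fetch path above.
 */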
3108
3109/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3110FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3111{
3112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3113 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3114        IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
3115 else
3116        IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
3117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3118 {
3119 /* XMM, greg*/
3120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3121 IEM_MC_BEGIN(0, 1);
3122 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3123 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3124 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3125 {
3126 IEM_MC_LOCAL(uint64_t, u64Tmp);
3127 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3128 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3129 }
3130 else
3131 {
3132 IEM_MC_LOCAL(uint32_t, u32Tmp);
3133 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3134 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3135 }
3136 IEM_MC_ADVANCE_RIP();
3137 IEM_MC_END();
3138 }
3139 else
3140 {
3141 /* XMM, [mem] */
3142 IEM_MC_BEGIN(0, 2);
3143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3144 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3145        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate */
3146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3147 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3148 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3149 {
3150 IEM_MC_LOCAL(uint64_t, u64Tmp);
3151 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3152 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3153 }
3154 else
3155 {
3156 IEM_MC_LOCAL(uint32_t, u32Tmp);
3157 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3158 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3159 }
3160 IEM_MC_ADVANCE_RIP();
3161 IEM_MC_END();
3162 }
3163 return VINF_SUCCESS;
3164}
3165
3166/* Opcode 0xf3 0x0f 0x6e - invalid */
3167
3168
3169/** Opcode 0x0f 0x6f - movq Pq, Qq */
3170FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3171{
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3174 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3175 {
3176 /*
3177 * Register, register.
3178 */
3179 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3180 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3182 IEM_MC_BEGIN(0, 1);
3183 IEM_MC_LOCAL(uint64_t, u64Tmp);
3184 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3185 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3186 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3187 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3188 IEM_MC_ADVANCE_RIP();
3189 IEM_MC_END();
3190 }
3191 else
3192 {
3193 /*
3194 * Register, memory.
3195 */
3196 IEM_MC_BEGIN(0, 2);
3197 IEM_MC_LOCAL(uint64_t, u64Tmp);
3198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3199
3200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3202 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3203 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3204 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3205 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3206
3207 IEM_MC_ADVANCE_RIP();
3208 IEM_MC_END();
3209 }
3210 return VINF_SUCCESS;
3211}
3212
3213/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3214FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3215{
3216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3217 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3218 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3219 {
3220 /*
3221 * Register, register.
3222 */
3223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3224 IEM_MC_BEGIN(0, 0);
3225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3226 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3227 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3228 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3229 IEM_MC_ADVANCE_RIP();
3230 IEM_MC_END();
3231 }
3232 else
3233 {
3234 /*
3235 * Register, memory.
3236 */
3237 IEM_MC_BEGIN(0, 2);
3238 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3240
3241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3244 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3245 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3246 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3247
3248 IEM_MC_ADVANCE_RIP();
3249 IEM_MC_END();
3250 }
3251 return VINF_SUCCESS;
3252}
3253
3254/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3255FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3256{
3257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3258 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3259 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3260 {
3261 /*
3262 * Register, register.
3263 */
3264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3265 IEM_MC_BEGIN(0, 0);
3266 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3267 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3268 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3269 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3270 IEM_MC_ADVANCE_RIP();
3271 IEM_MC_END();
3272 }
3273 else
3274 {
3275 /*
3276 * Register, memory.
3277 */
3278 IEM_MC_BEGIN(0, 2);
3279 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3281
3282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3284 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3285 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3286 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3287 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3288
3289 IEM_MC_ADVANCE_RIP();
3290 IEM_MC_END();
3291 }
3292 return VINF_SUCCESS;
3293}
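
/*
 * The only difference between the movdqa and movdqu paths above is the
 * memory microcode: IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces the 16-byte
 * alignment rule of movdqa (#GP(0) on a misaligned operand), while the
 * plain IEM_MC_FETCH_MEM_U128 performs the unaligned access movdqu allows.
 */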
3294
3295
3296/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3297FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3298{
3299 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3302 {
3303 /*
3304 * Register, register.
3305 */
3306 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3308
3309 IEM_MC_BEGIN(3, 0);
3310 IEM_MC_ARG(uint64_t *, pDst, 0);
3311 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3312 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3313 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3314 IEM_MC_PREPARE_FPU_USAGE();
3315 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3316 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3317 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3318 IEM_MC_ADVANCE_RIP();
3319 IEM_MC_END();
3320 }
3321 else
3322 {
3323 /*
3324 * Register, memory.
3325 */
3326 IEM_MC_BEGIN(3, 2);
3327 IEM_MC_ARG(uint64_t *, pDst, 0);
3328 IEM_MC_LOCAL(uint64_t, uSrc);
3329 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3331
3332        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows ModRM */
3333 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3334 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3337
3338 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3339 IEM_MC_PREPARE_FPU_USAGE();
3340 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3341 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3342
3343 IEM_MC_ADVANCE_RIP();
3344 IEM_MC_END();
3345 }
3346 return VINF_SUCCESS;
3347}
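
/*
 * Shuffle selector sketch: each 2-bit field of the immediate picks one
 * source word, so for pshufw:
 *
 *     dst.w[i] = src.w[(bEvil >> (i * 2)) & 3];    /­* i = 0..3 *­/
 *
 * e.g. 0xE4 is the identity and 0x1B reverses the four words. pshufd,
 * pshufhw and pshuflw below apply the same selector to dwords, the high
 * four words and the low four words respectively.
 */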
3348
3349/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3350FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3351{
3352 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3354 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3355 {
3356 /*
3357 * Register, register.
3358 */
3359 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3361
3362 IEM_MC_BEGIN(3, 0);
3363 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3364 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3365 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3366 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3367 IEM_MC_PREPARE_SSE_USAGE();
3368 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3369 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3370 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3371 IEM_MC_ADVANCE_RIP();
3372 IEM_MC_END();
3373 }
3374 else
3375 {
3376 /*
3377 * Register, memory.
3378 */
3379 IEM_MC_BEGIN(3, 2);
3380 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3381 IEM_MC_LOCAL(RTUINT128U, uSrc);
3382 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3384
3385        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows ModRM */
3386 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3387 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3389 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3390
3391 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3392 IEM_MC_PREPARE_SSE_USAGE();
3393 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3394 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3395
3396 IEM_MC_ADVANCE_RIP();
3397 IEM_MC_END();
3398 }
3399 return VINF_SUCCESS;
3400}
3401
3402/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3403FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3404{
3405 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3407 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3408 {
3409 /*
3410 * Register, register.
3411 */
3412 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3414
3415 IEM_MC_BEGIN(3, 0);
3416 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3417 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3418 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3419 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3420 IEM_MC_PREPARE_SSE_USAGE();
3421 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3422 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3423 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3424 IEM_MC_ADVANCE_RIP();
3425 IEM_MC_END();
3426 }
3427 else
3428 {
3429 /*
3430 * Register, memory.
3431 */
3432 IEM_MC_BEGIN(3, 2);
3433 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3434 IEM_MC_LOCAL(RTUINT128U, uSrc);
3435 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3437
3438        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows ModRM */
3439 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3440 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3442 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3443
3444 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3445 IEM_MC_PREPARE_SSE_USAGE();
3446 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3447 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3448
3449 IEM_MC_ADVANCE_RIP();
3450 IEM_MC_END();
3451 }
3452 return VINF_SUCCESS;
3453}
3454
3455/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3456FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3457{
3458 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3461 {
3462 /*
3463 * Register, register.
3464 */
3465 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3467
3468 IEM_MC_BEGIN(3, 0);
3469 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3470 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3471 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3473 IEM_MC_PREPARE_SSE_USAGE();
3474 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3475 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3476 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3477 IEM_MC_ADVANCE_RIP();
3478 IEM_MC_END();
3479 }
3480 else
3481 {
3482 /*
3483 * Register, memory.
3484 */
3485 IEM_MC_BEGIN(3, 2);
3486 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3487 IEM_MC_LOCAL(RTUINT128U, uSrc);
3488 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3490
3491        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* imm8 follows ModRM */
3492 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3493 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3495 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3496
3497 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3498 IEM_MC_PREPARE_SSE_USAGE();
3499 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3500 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 return VINF_SUCCESS;
3506}
3507
3508
3509/** Opcode 0x0f 0x71 11/2. */
3510FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3511
3512/** Opcode 0x66 0x0f 0x71 11/2. */
3513FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
3514
3515/** Opcode 0x0f 0x71 11/4. */
3516FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3517
3518/** Opcode 0x66 0x0f 0x71 11/4. */
3519FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
3520
3521/** Opcode 0x0f 0x71 11/6. */
3522FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3523
3524/** Opcode 0x66 0x0f 0x71 11/6. */
3525FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
3526
3527
3528/**
3529 * Group 12 jump table for register variant.
3530 */
3531IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
3532{
3533 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3534 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3535 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3536 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3537 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3538 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3539 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3540 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3541};
3542AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
3543
3544
3545/** Opcode 0x0f 0x71. */
3546FNIEMOP_DEF(iemOp_Grp12)
3547{
3548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3550 /* register, register */
3551 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3552 + pVCpu->iem.s.idxPrefix], bRm);
3553 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3554}
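
/*
 * Groups 12-14 are the immediate-count shift forms: the ModRM reg field
 * selects the operation and the shifted register sits in the rm field.
 * For instance 0F 71 D0 03 is mod=11 reg=010 rm=000 plus imm8, i.e.
 * psrlw mm0,3; with a 0x66 prefix the same bytes select the _Ux_Ib (XMM)
 * column of the tables.
 */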
3555
3556
3557/** Opcode 0x0f 0x72 11/2. */
3558FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3559
3560/** Opcode 0x66 0x0f 0x72 11/2. */
3561FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
3562
3563/** Opcode 0x0f 0x72 11/4. */
3564FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3565
3566/** Opcode 0x66 0x0f 0x72 11/4. */
3567FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
3568
3569/** Opcode 0x0f 0x72 11/6. */
3570FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3571
3572/** Opcode 0x66 0x0f 0x72 11/6. */
3573FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
3574
3575
3576/**
3577 * Group 13 jump table for register variant.
3578 */
3579IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
3580{
3581 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3582 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3583 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3584 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3585 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3586 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3587 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3588 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3589};
3590AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
3591
3592/** Opcode 0x0f 0x72. */
3593FNIEMOP_DEF(iemOp_Grp13)
3594{
3595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3597 /* register, register */
3598 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3599 + pVCpu->iem.s.idxPrefix], bRm);
3600 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3601}
3602
3603
3604/** Opcode 0x0f 0x73 11/2. */
3605FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3606
3607/** Opcode 0x66 0x0f 0x73 11/2. */
3608FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
3609
3610/** Opcode 0x66 0x0f 0x73 11/3. */
3611FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
3612
3613/** Opcode 0x0f 0x73 11/6. */
3614FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3615
3616/** Opcode 0x66 0x0f 0x73 11/6. */
3617FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
3618
3619/** Opcode 0x66 0x0f 0x73 11/7. */
3620FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
3621
3622/**
3623 * Group 14 jump table for register variant.
3624 */
3625IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
3626{
3627 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3628 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3629 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3630 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3631 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3632 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3633 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3634 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3635};
3636AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
3637
3638
3639/** Opcode 0x0f 0x73. */
3640FNIEMOP_DEF(iemOp_Grp14)
3641{
3642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3643 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3644 /* register, register */
3645 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3646 + pVCpu->iem.s.idxPrefix], bRm);
3647 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3648}
3649
3650
3651/**
3652 * Common worker for MMX instructions of the form:
3653 * pxxx mm1, mm2/mem64
3654 */
3655FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3656{
3657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3658 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3659 {
3660 /*
3661 * Register, register.
3662 */
3663 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3664 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3666 IEM_MC_BEGIN(2, 0);
3667 IEM_MC_ARG(uint64_t *, pDst, 0);
3668 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3669 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3670 IEM_MC_PREPARE_FPU_USAGE();
3671 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3672 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3673 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3674 IEM_MC_ADVANCE_RIP();
3675 IEM_MC_END();
3676 }
3677 else
3678 {
3679 /*
3680 * Register, memory.
3681 */
3682 IEM_MC_BEGIN(2, 2);
3683 IEM_MC_ARG(uint64_t *, pDst, 0);
3684 IEM_MC_LOCAL(uint64_t, uSrc);
3685 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3687
3688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3690 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3691 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3692
3693 IEM_MC_PREPARE_FPU_USAGE();
3694 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3695 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3696
3697 IEM_MC_ADVANCE_RIP();
3698 IEM_MC_END();
3699 }
3700 return VINF_SUCCESS;
3701}
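
/* Usage sketch: an opcode decoder simply forwards its function table entry,
   as iemOp_pcmpeqb_Pq_Qq below does with
       return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
   the worker fetches the 64-bit source (MMX register or memory) and applies
   pImpl->pfnU64 to the destination MMX register in place. */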
3702
3703
3704/**
3705 * Common worker for SSE2 instructions of the form:
3706 * pxxx xmm1, xmm2/mem128
3707 *
3708 * Proper alignment of the 128-bit operand is enforced.
3709 * Exceptions type 4. SSE2 cpuid checks.
3710 */
3711FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3712{
3713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3714 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3715 {
3716 /*
3717 * Register, register.
3718 */
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_BEGIN(2, 0);
3721 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3722 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3723 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3724 IEM_MC_PREPARE_SSE_USAGE();
3725 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3726 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3727 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3728 IEM_MC_ADVANCE_RIP();
3729 IEM_MC_END();
3730 }
3731 else
3732 {
3733 /*
3734 * Register, memory.
3735 */
3736 IEM_MC_BEGIN(2, 2);
3737 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3738 IEM_MC_LOCAL(RTUINT128U, uSrc);
3739 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3741
3742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3744 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3745 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3746
3747 IEM_MC_PREPARE_SSE_USAGE();
3748 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3749 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3750
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 }
3754 return VINF_SUCCESS;
3755}
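
/* Unlike the MMX worker above, the memory path here fetches the source with
   IEM_MC_FETCH_MEM_U128_ALIGN_SSE, so a 128-bit operand that is not 16-byte
   aligned raises #GP(0), as the legacy (non-VEX) encodings require. */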
3756
3757
3758/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3759FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3760{
3761 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3762 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3763}
3764
3765/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
3766FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
3767{
3768 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
3769 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3770}
3771
3772/* Opcode 0xf3 0x0f 0x74 - invalid */
3773/* Opcode 0xf2 0x0f 0x74 - invalid */
3774
3775
3776/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3777FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3778{
3779 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3780 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3781}
3782
3783/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
3784FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
3785{
3786 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
3787 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3788}
3789
3790/* Opcode 0xf3 0x0f 0x75 - invalid */
3791/* Opcode 0xf2 0x0f 0x75 - invalid */
3792
3793
3794/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3795FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3796{
3797 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3798 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3799}
3800
3801/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
3802FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
3803{
3804 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
3805 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3806}
3807
3808/* Opcode 0xf3 0x0f 0x76 - invalid */
3809/* Opcode 0xf2 0x0f 0x76 - invalid */
3810
3811
3812/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
3813FNIEMOP_STUB(iemOp_emms);
3814/* Opcode 0x66 0x0f 0x77 - invalid */
3815/* Opcode 0xf3 0x0f 0x77 - invalid */
3816/* Opcode 0xf2 0x0f 0x77 - invalid */
3817
3818/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3819FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3820/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3821FNIEMOP_STUB(iemOp_AmdGrp17);
3822/* Opcode 0xf3 0x0f 0x78 - invalid */
3823/* Opcode 0xf2 0x0f 0x78 - invalid */
3824
3825/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3826FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3827/* Opcode 0x66 0x0f 0x79 - invalid */
3828/* Opcode 0xf3 0x0f 0x79 - invalid */
3829/* Opcode 0xf2 0x0f 0x79 - invalid */
3830
3831/* Opcode 0x0f 0x7a - invalid */
3832/* Opcode 0x66 0x0f 0x7a - invalid */
3833/* Opcode 0xf3 0x0f 0x7a - invalid */
3834/* Opcode 0xf2 0x0f 0x7a - invalid */
3835
3836/* Opcode 0x0f 0x7b - invalid */
3837/* Opcode 0x66 0x0f 0x7b - invalid */
3838/* Opcode 0xf3 0x0f 0x7b - invalid */
3839/* Opcode 0xf2 0x0f 0x7b - invalid */
3840
3841/* Opcode 0x0f 0x7c - invalid */
3842/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
3843FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
3844/* Opcode 0xf3 0x0f 0x7c - invalid */
3845/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
3846FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
3847
3848/* Opcode 0x0f 0x7d - invalid */
3849/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
3850FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
3851/* Opcode 0xf3 0x0f 0x7d - invalid */
3852/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
3853FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
3854
3855
3856/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
3857FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3858{
3859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3860 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3861 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3862 else
3863 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3865 {
3866 /* greg, MMX */
3867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3868 IEM_MC_BEGIN(0, 1);
3869 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3870 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3871 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3872 {
3873 IEM_MC_LOCAL(uint64_t, u64Tmp);
3874 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3875 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3876 }
3877 else
3878 {
3879 IEM_MC_LOCAL(uint32_t, u32Tmp);
3880 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3881 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3882 }
3883 IEM_MC_ADVANCE_RIP();
3884 IEM_MC_END();
3885 }
3886 else
3887 {
3888 /* [mem], MMX */
3889 IEM_MC_BEGIN(0, 2);
3890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows */
3892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3893 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3894 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3895 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3896 {
3897 IEM_MC_LOCAL(uint64_t, u64Tmp);
3898 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3899 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3900 }
3901 else
3902 {
3903 IEM_MC_LOCAL(uint32_t, u32Tmp);
3904 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3905 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3906 }
3907 IEM_MC_ADVANCE_RIP();
3908 IEM_MC_END();
3909 }
3910 return VINF_SUCCESS;
3911}
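
/* REX.W selects between the two forms handled above: with REX.W the whole
   64-bit MMX register is stored to a 64-bit GPR or memory (movq), otherwise
   only the low doubleword is stored (movd). */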
3912
3913/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
3914FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
3915{
3916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3917 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3918 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3919 else
3920 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3921 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3922 {
3923 /* greg, XMM */
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3925 IEM_MC_BEGIN(0, 1);
3926 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3927 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3928 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3929 {
3930 IEM_MC_LOCAL(uint64_t, u64Tmp);
3931 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3932 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3933 }
3934 else
3935 {
3936 IEM_MC_LOCAL(uint32_t, u32Tmp);
3937 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3938 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3939 }
3940 IEM_MC_ADVANCE_RIP();
3941 IEM_MC_END();
3942 }
3943 else
3944 {
3945 /* [mem], XMM */
3946 IEM_MC_BEGIN(0, 2);
3947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate byte follows */
3949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3950 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3951 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3952 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3953 {
3954 IEM_MC_LOCAL(uint64_t, u64Tmp);
3955 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3956 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3957 }
3958 else
3959 {
3960 IEM_MC_LOCAL(uint32_t, u32Tmp);
3961 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3962 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3963 }
3964 IEM_MC_ADVANCE_RIP();
3965 IEM_MC_END();
3966 }
3967 return VINF_SUCCESS;
3968}
3969
3970/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
3971FNIEMOP_STUB(iemOp_movq_Vq_Wq);
3972/* Opcode 0xf2 0x0f 0x7e - invalid */
3973
3974
3975/** Opcode 0x0f 0x7f - movq Qq, Pq */
3976FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3977{
3978 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3980 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3981 {
3982 /*
3983 * Register, register.
3984 */
3985 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3986 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3988 IEM_MC_BEGIN(0, 1);
3989 IEM_MC_LOCAL(uint64_t, u64Tmp);
3990 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3991 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3992 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3993 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3994 IEM_MC_ADVANCE_RIP();
3995 IEM_MC_END();
3996 }
3997 else
3998 {
3999 /*
4000 * Memory, register.
4001 */
4002 IEM_MC_BEGIN(0, 2);
4003 IEM_MC_LOCAL(uint64_t, u64Tmp);
4004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4005
4006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4008 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4009 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4010
4011 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4012 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4013
4014 IEM_MC_ADVANCE_RIP();
4015 IEM_MC_END();
4016 }
4017 return VINF_SUCCESS;
4018}
4019
4020/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4021FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4022{
4023 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4025 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4026 {
4027 /*
4028 * Register, register.
4029 */
4030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4031 IEM_MC_BEGIN(0, 0);
4032 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4033 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4034 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4035 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4036 IEM_MC_ADVANCE_RIP();
4037 IEM_MC_END();
4038 }
4039 else
4040 {
4041 /*
4042 * Memory, register.
4043 */
4044 IEM_MC_BEGIN(0, 2);
4045 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4047
4048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4050 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4051 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4052
4053 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4054 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4055
4056 IEM_MC_ADVANCE_RIP();
4057 IEM_MC_END();
4058 }
4059 return VINF_SUCCESS;
4060}
4061
4062/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4063FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4064{
4065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4066 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4068 {
4069 /*
4070 * Register, register.
4071 */
4072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4073 IEM_MC_BEGIN(0, 0);
4074 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4075 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4076 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4077 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4078 IEM_MC_ADVANCE_RIP();
4079 IEM_MC_END();
4080 }
4081 else
4082 {
4083 /*
4084 * Memory, register.
4085 */
4086 IEM_MC_BEGIN(0, 2);
4087 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4089
4090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4092 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4093 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4094
4095 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4096 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4097
4098 IEM_MC_ADVANCE_RIP();
4099 IEM_MC_END();
4100 }
4101 return VINF_SUCCESS;
4102}
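
/* The only difference from movdqa above is alignment: movdqu stores with
   IEM_MC_STORE_MEM_U128, which accepts any byte alignment, whereas movdqa's
   IEM_MC_STORE_MEM_U128_ALIGN_SSE raises #GP(0) for misaligned operands. */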
4103
4104/* Opcode 0xf2 0x0f 0x7f - invalid */
4105
4106
4107
4108/** Opcode 0x0f 0x80. */
4109FNIEMOP_DEF(iemOp_jo_Jv)
4110{
4111 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4112 IEMOP_HLP_MIN_386();
4113 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4114 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4115 {
4116 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4118
4119 IEM_MC_BEGIN(0, 0);
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4121 IEM_MC_REL_JMP_S16(i16Imm);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_ADVANCE_RIP();
4124 } IEM_MC_ENDIF();
4125 IEM_MC_END();
4126 }
4127 else
4128 {
4129 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4131
4132 IEM_MC_BEGIN(0, 0);
4133 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4134 IEM_MC_REL_JMP_S32(i32Imm);
4135 } IEM_MC_ELSE() {
4136 IEM_MC_ADVANCE_RIP();
4137 } IEM_MC_ENDIF();
4138 IEM_MC_END();
4139 }
4140 return VINF_SUCCESS;
4141}
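
/* All the Jcc Jv handlers from 0x80 thru 0x8f follow the pattern above: the
   effective operand size selects a signed 16-bit or 32-bit displacement (the
   latter also covering 64-bit code, where IEMOP_HLP_DEFAULT_64BIT_OP_SIZE
   forces the 64-bit default), and the two arms either take the relative jump
   or just advance RIP.  Only the EFLAGS condition tested differs. */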
4142
4143
4144/** Opcode 0x0f 0x81. */
4145FNIEMOP_DEF(iemOp_jno_Jv)
4146{
4147 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4148 IEMOP_HLP_MIN_386();
4149 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4150 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4151 {
4152 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154
4155 IEM_MC_BEGIN(0, 0);
4156 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4157 IEM_MC_ADVANCE_RIP();
4158 } IEM_MC_ELSE() {
4159 IEM_MC_REL_JMP_S16(i16Imm);
4160 } IEM_MC_ENDIF();
4161 IEM_MC_END();
4162 }
4163 else
4164 {
4165 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4167
4168 IEM_MC_BEGIN(0, 0);
4169 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4170 IEM_MC_ADVANCE_RIP();
4171 } IEM_MC_ELSE() {
4172 IEM_MC_REL_JMP_S32(i32Imm);
4173 } IEM_MC_ENDIF();
4174 IEM_MC_END();
4175 }
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** Opcode 0x0f 0x82. */
4181FNIEMOP_DEF(iemOp_jc_Jv)
4182{
4183 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4184 IEMOP_HLP_MIN_386();
4185 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4186 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4187 {
4188 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4190
4191 IEM_MC_BEGIN(0, 0);
4192 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4193 IEM_MC_REL_JMP_S16(i16Imm);
4194 } IEM_MC_ELSE() {
4195 IEM_MC_ADVANCE_RIP();
4196 } IEM_MC_ENDIF();
4197 IEM_MC_END();
4198 }
4199 else
4200 {
4201 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4203
4204 IEM_MC_BEGIN(0, 0);
4205 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4206 IEM_MC_REL_JMP_S32(i32Imm);
4207 } IEM_MC_ELSE() {
4208 IEM_MC_ADVANCE_RIP();
4209 } IEM_MC_ENDIF();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x83. */
4217FNIEMOP_DEF(iemOp_jnc_Jv)
4218{
4219 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4220 IEMOP_HLP_MIN_386();
4221 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4222 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4223 {
4224 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4226
4227 IEM_MC_BEGIN(0, 0);
4228 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4229 IEM_MC_ADVANCE_RIP();
4230 } IEM_MC_ELSE() {
4231 IEM_MC_REL_JMP_S16(i16Imm);
4232 } IEM_MC_ENDIF();
4233 IEM_MC_END();
4234 }
4235 else
4236 {
4237 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4239
4240 IEM_MC_BEGIN(0, 0);
4241 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4242 IEM_MC_ADVANCE_RIP();
4243 } IEM_MC_ELSE() {
4244 IEM_MC_REL_JMP_S32(i32Imm);
4245 } IEM_MC_ENDIF();
4246 IEM_MC_END();
4247 }
4248 return VINF_SUCCESS;
4249}
4250
4251
4252/** Opcode 0x0f 0x84. */
4253FNIEMOP_DEF(iemOp_je_Jv)
4254{
4255 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4256 IEMOP_HLP_MIN_386();
4257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4258 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4259 {
4260 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262
4263 IEM_MC_BEGIN(0, 0);
4264 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4265 IEM_MC_REL_JMP_S16(i16Imm);
4266 } IEM_MC_ELSE() {
4267 IEM_MC_ADVANCE_RIP();
4268 } IEM_MC_ENDIF();
4269 IEM_MC_END();
4270 }
4271 else
4272 {
4273 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4275
4276 IEM_MC_BEGIN(0, 0);
4277 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4278 IEM_MC_REL_JMP_S32(i32Imm);
4279 } IEM_MC_ELSE() {
4280 IEM_MC_ADVANCE_RIP();
4281 } IEM_MC_ENDIF();
4282 IEM_MC_END();
4283 }
4284 return VINF_SUCCESS;
4285}
4286
4287
4288/** Opcode 0x0f 0x85. */
4289FNIEMOP_DEF(iemOp_jne_Jv)
4290{
4291 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4292 IEMOP_HLP_MIN_386();
4293 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4294 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4295 {
4296 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4298
4299 IEM_MC_BEGIN(0, 0);
4300 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4301 IEM_MC_ADVANCE_RIP();
4302 } IEM_MC_ELSE() {
4303 IEM_MC_REL_JMP_S16(i16Imm);
4304 } IEM_MC_ENDIF();
4305 IEM_MC_END();
4306 }
4307 else
4308 {
4309 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4311
4312 IEM_MC_BEGIN(0, 0);
4313 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4314 IEM_MC_ADVANCE_RIP();
4315 } IEM_MC_ELSE() {
4316 IEM_MC_REL_JMP_S32(i32Imm);
4317 } IEM_MC_ENDIF();
4318 IEM_MC_END();
4319 }
4320 return VINF_SUCCESS;
4321}
4322
4323
4324/** Opcode 0x0f 0x86. */
4325FNIEMOP_DEF(iemOp_jbe_Jv)
4326{
4327 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4328 IEMOP_HLP_MIN_386();
4329 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4330 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4331 {
4332 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4334
4335 IEM_MC_BEGIN(0, 0);
4336 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4337 IEM_MC_REL_JMP_S16(i16Imm);
4338 } IEM_MC_ELSE() {
4339 IEM_MC_ADVANCE_RIP();
4340 } IEM_MC_ENDIF();
4341 IEM_MC_END();
4342 }
4343 else
4344 {
4345 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4347
4348 IEM_MC_BEGIN(0, 0);
4349 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4350 IEM_MC_REL_JMP_S32(i32Imm);
4351 } IEM_MC_ELSE() {
4352 IEM_MC_ADVANCE_RIP();
4353 } IEM_MC_ENDIF();
4354 IEM_MC_END();
4355 }
4356 return VINF_SUCCESS;
4357}
4358
4359
4360/** Opcode 0x0f 0x87. */
4361FNIEMOP_DEF(iemOp_jnbe_Jv)
4362{
4363 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4364 IEMOP_HLP_MIN_386();
4365 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4366 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4367 {
4368 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4370
4371 IEM_MC_BEGIN(0, 0);
4372 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4373 IEM_MC_ADVANCE_RIP();
4374 } IEM_MC_ELSE() {
4375 IEM_MC_REL_JMP_S16(i16Imm);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_END();
4378 }
4379 else
4380 {
4381 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4383
4384 IEM_MC_BEGIN(0, 0);
4385 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4386 IEM_MC_ADVANCE_RIP();
4387 } IEM_MC_ELSE() {
4388 IEM_MC_REL_JMP_S32(i32Imm);
4389 } IEM_MC_ENDIF();
4390 IEM_MC_END();
4391 }
4392 return VINF_SUCCESS;
4393}
4394
4395
4396/** Opcode 0x0f 0x88. */
4397FNIEMOP_DEF(iemOp_js_Jv)
4398{
4399 IEMOP_MNEMONIC(js_Jv, "js Jv");
4400 IEMOP_HLP_MIN_386();
4401 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4402 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4403 {
4404 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4406
4407 IEM_MC_BEGIN(0, 0);
4408 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4409 IEM_MC_REL_JMP_S16(i16Imm);
4410 } IEM_MC_ELSE() {
4411 IEM_MC_ADVANCE_RIP();
4412 } IEM_MC_ENDIF();
4413 IEM_MC_END();
4414 }
4415 else
4416 {
4417 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4419
4420 IEM_MC_BEGIN(0, 0);
4421 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4422 IEM_MC_REL_JMP_S32(i32Imm);
4423 } IEM_MC_ELSE() {
4424 IEM_MC_ADVANCE_RIP();
4425 } IEM_MC_ENDIF();
4426 IEM_MC_END();
4427 }
4428 return VINF_SUCCESS;
4429}
4430
4431
4432/** Opcode 0x0f 0x89. */
4433FNIEMOP_DEF(iemOp_jns_Jv)
4434{
4435 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4436 IEMOP_HLP_MIN_386();
4437 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4438 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4439 {
4440 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4442
4443 IEM_MC_BEGIN(0, 0);
4444 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4445 IEM_MC_ADVANCE_RIP();
4446 } IEM_MC_ELSE() {
4447 IEM_MC_REL_JMP_S16(i16Imm);
4448 } IEM_MC_ENDIF();
4449 IEM_MC_END();
4450 }
4451 else
4452 {
4453 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4455
4456 IEM_MC_BEGIN(0, 0);
4457 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4458 IEM_MC_ADVANCE_RIP();
4459 } IEM_MC_ELSE() {
4460 IEM_MC_REL_JMP_S32(i32Imm);
4461 } IEM_MC_ENDIF();
4462 IEM_MC_END();
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x8a. */
4469FNIEMOP_DEF(iemOp_jp_Jv)
4470{
4471 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4472 IEMOP_HLP_MIN_386();
4473 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4474 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4475 {
4476 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4478
4479 IEM_MC_BEGIN(0, 0);
4480 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4481 IEM_MC_REL_JMP_S16(i16Imm);
4482 } IEM_MC_ELSE() {
4483 IEM_MC_ADVANCE_RIP();
4484 } IEM_MC_ENDIF();
4485 IEM_MC_END();
4486 }
4487 else
4488 {
4489 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4491
4492 IEM_MC_BEGIN(0, 0);
4493 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4494 IEM_MC_REL_JMP_S32(i32Imm);
4495 } IEM_MC_ELSE() {
4496 IEM_MC_ADVANCE_RIP();
4497 } IEM_MC_ENDIF();
4498 IEM_MC_END();
4499 }
4500 return VINF_SUCCESS;
4501}
4502
4503
4504/** Opcode 0x0f 0x8b. */
4505FNIEMOP_DEF(iemOp_jnp_Jv)
4506{
4507 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4508 IEMOP_HLP_MIN_386();
4509 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4510 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4511 {
4512 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4514
4515 IEM_MC_BEGIN(0, 0);
4516 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4517 IEM_MC_ADVANCE_RIP();
4518 } IEM_MC_ELSE() {
4519 IEM_MC_REL_JMP_S16(i16Imm);
4520 } IEM_MC_ENDIF();
4521 IEM_MC_END();
4522 }
4523 else
4524 {
4525 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4527
4528 IEM_MC_BEGIN(0, 0);
4529 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4530 IEM_MC_ADVANCE_RIP();
4531 } IEM_MC_ELSE() {
4532 IEM_MC_REL_JMP_S32(i32Imm);
4533 } IEM_MC_ENDIF();
4534 IEM_MC_END();
4535 }
4536 return VINF_SUCCESS;
4537}
4538
4539
4540/** Opcode 0x0f 0x8c. */
4541FNIEMOP_DEF(iemOp_jl_Jv)
4542{
4543 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4544 IEMOP_HLP_MIN_386();
4545 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4546 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4547 {
4548 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4550
4551 IEM_MC_BEGIN(0, 0);
4552 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4553 IEM_MC_REL_JMP_S16(i16Imm);
4554 } IEM_MC_ELSE() {
4555 IEM_MC_ADVANCE_RIP();
4556 } IEM_MC_ENDIF();
4557 IEM_MC_END();
4558 }
4559 else
4560 {
4561 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4563
4564 IEM_MC_BEGIN(0, 0);
4565 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4566 IEM_MC_REL_JMP_S32(i32Imm);
4567 } IEM_MC_ELSE() {
4568 IEM_MC_ADVANCE_RIP();
4569 } IEM_MC_ENDIF();
4570 IEM_MC_END();
4571 }
4572 return VINF_SUCCESS;
4573}
4574
4575
4576/** Opcode 0x0f 0x8d. */
4577FNIEMOP_DEF(iemOp_jnl_Jv)
4578{
4579 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4580 IEMOP_HLP_MIN_386();
4581 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4582 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4583 {
4584 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4586
4587 IEM_MC_BEGIN(0, 0);
4588 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4589 IEM_MC_ADVANCE_RIP();
4590 } IEM_MC_ELSE() {
4591 IEM_MC_REL_JMP_S16(i16Imm);
4592 } IEM_MC_ENDIF();
4593 IEM_MC_END();
4594 }
4595 else
4596 {
4597 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4599
4600 IEM_MC_BEGIN(0, 0);
4601 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4602 IEM_MC_ADVANCE_RIP();
4603 } IEM_MC_ELSE() {
4604 IEM_MC_REL_JMP_S32(i32Imm);
4605 } IEM_MC_ENDIF();
4606 IEM_MC_END();
4607 }
4608 return VINF_SUCCESS;
4609}
4610
4611
4612/** Opcode 0x0f 0x8e. */
4613FNIEMOP_DEF(iemOp_jle_Jv)
4614{
4615 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4616 IEMOP_HLP_MIN_386();
4617 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4618 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4619 {
4620 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4622
4623 IEM_MC_BEGIN(0, 0);
4624 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4625 IEM_MC_REL_JMP_S16(i16Imm);
4626 } IEM_MC_ELSE() {
4627 IEM_MC_ADVANCE_RIP();
4628 } IEM_MC_ENDIF();
4629 IEM_MC_END();
4630 }
4631 else
4632 {
4633 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4635
4636 IEM_MC_BEGIN(0, 0);
4637 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4638 IEM_MC_REL_JMP_S32(i32Imm);
4639 } IEM_MC_ELSE() {
4640 IEM_MC_ADVANCE_RIP();
4641 } IEM_MC_ENDIF();
4642 IEM_MC_END();
4643 }
4644 return VINF_SUCCESS;
4645}
4646
4647
4648/** Opcode 0x0f 0x8f. */
4649FNIEMOP_DEF(iemOp_jnle_Jv)
4650{
4651 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4652 IEMOP_HLP_MIN_386();
4653 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4654 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4655 {
4656 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4658
4659 IEM_MC_BEGIN(0, 0);
4660 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4661 IEM_MC_ADVANCE_RIP();
4662 } IEM_MC_ELSE() {
4663 IEM_MC_REL_JMP_S16(i16Imm);
4664 } IEM_MC_ENDIF();
4665 IEM_MC_END();
4666 }
4667 else
4668 {
4669 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4671
4672 IEM_MC_BEGIN(0, 0);
4673 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4674 IEM_MC_ADVANCE_RIP();
4675 } IEM_MC_ELSE() {
4676 IEM_MC_REL_JMP_S32(i32Imm);
4677 } IEM_MC_ENDIF();
4678 IEM_MC_END();
4679 }
4680 return VINF_SUCCESS;
4681}
4682
4683
4684/** Opcode 0x0f 0x90. */
4685FNIEMOP_DEF(iemOp_seto_Eb)
4686{
4687 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4688 IEMOP_HLP_MIN_386();
4689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4690
4691 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4692 * any way. AMD says it's "unused", whatever that means. We're
4693 * ignoring it for now. */
4694 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4695 {
4696 /* register target */
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_BEGIN(0, 0);
4699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4700 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4701 } IEM_MC_ELSE() {
4702 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4703 } IEM_MC_ENDIF();
4704 IEM_MC_ADVANCE_RIP();
4705 IEM_MC_END();
4706 }
4707 else
4708 {
4709 /* memory target */
4710 IEM_MC_BEGIN(0, 1);
4711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4715 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4716 } IEM_MC_ELSE() {
4717 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4718 } IEM_MC_ENDIF();
4719 IEM_MC_ADVANCE_RIP();
4720 IEM_MC_END();
4721 }
4722 return VINF_SUCCESS;
4723}
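
/* The remaining setcc handlers (0x91 thru 0x9f) are structurally identical:
   the ModR/M byte selects a byte register or memory destination, which is set
   to 1 when the condition holds and 0 otherwise; only the EFLAGS test and its
   polarity vary. */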
4724
4725
4726/** Opcode 0x0f 0x91. */
4727FNIEMOP_DEF(iemOp_setno_Eb)
4728{
4729 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4730 IEMOP_HLP_MIN_386();
4731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4732
4733 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4734 * any way. AMD says it's "unused", whatever that means. We're
4735 * ignoring it for now. */
4736 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4737 {
4738 /* register target */
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4740 IEM_MC_BEGIN(0, 0);
4741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4742 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4743 } IEM_MC_ELSE() {
4744 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4745 } IEM_MC_ENDIF();
4746 IEM_MC_ADVANCE_RIP();
4747 IEM_MC_END();
4748 }
4749 else
4750 {
4751 /* memory target */
4752 IEM_MC_BEGIN(0, 1);
4753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4757 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4758 } IEM_MC_ELSE() {
4759 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4760 } IEM_MC_ENDIF();
4761 IEM_MC_ADVANCE_RIP();
4762 IEM_MC_END();
4763 }
4764 return VINF_SUCCESS;
4765}
4766
4767
4768/** Opcode 0x0f 0x92. */
4769FNIEMOP_DEF(iemOp_setc_Eb)
4770{
4771 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4772 IEMOP_HLP_MIN_386();
4773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4774
4775 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4776 * any way. AMD says it's "unused", whatever that means. We're
4777 * ignoring it for now. */
4778 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4779 {
4780 /* register target */
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782 IEM_MC_BEGIN(0, 0);
4783 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4784 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4785 } IEM_MC_ELSE() {
4786 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4787 } IEM_MC_ENDIF();
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 }
4791 else
4792 {
4793 /* memory target */
4794 IEM_MC_BEGIN(0, 1);
4795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4799 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4800 } IEM_MC_ELSE() {
4801 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4802 } IEM_MC_ENDIF();
4803 IEM_MC_ADVANCE_RIP();
4804 IEM_MC_END();
4805 }
4806 return VINF_SUCCESS;
4807}
4808
4809
4810/** Opcode 0x0f 0x93. */
4811FNIEMOP_DEF(iemOp_setnc_Eb)
4812{
4813 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4814 IEMOP_HLP_MIN_386();
4815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4816
4817 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4818 * any way. AMD says it's "unused", whatever that means. We're
4819 * ignoring it for now. */
4820 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4821 {
4822 /* register target */
4823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4824 IEM_MC_BEGIN(0, 0);
4825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4826 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4827 } IEM_MC_ELSE() {
4828 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4829 } IEM_MC_ENDIF();
4830 IEM_MC_ADVANCE_RIP();
4831 IEM_MC_END();
4832 }
4833 else
4834 {
4835 /* memory target */
4836 IEM_MC_BEGIN(0, 1);
4837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4840 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4841 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4842 } IEM_MC_ELSE() {
4843 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4844 } IEM_MC_ENDIF();
4845 IEM_MC_ADVANCE_RIP();
4846 IEM_MC_END();
4847 }
4848 return VINF_SUCCESS;
4849}
4850
4851
4852/** Opcode 0x0f 0x94. */
4853FNIEMOP_DEF(iemOp_sete_Eb)
4854{
4855 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4856 IEMOP_HLP_MIN_386();
4857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4858
4859 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4860 * any way. AMD says it's "unused", whatever that means. We're
4861 * ignoring it for now. */
4862 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4863 {
4864 /* register target */
4865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4866 IEM_MC_BEGIN(0, 0);
4867 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4868 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4869 } IEM_MC_ELSE() {
4870 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4871 } IEM_MC_ENDIF();
4872 IEM_MC_ADVANCE_RIP();
4873 IEM_MC_END();
4874 }
4875 else
4876 {
4877 /* memory target */
4878 IEM_MC_BEGIN(0, 1);
4879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4882 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4883 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4884 } IEM_MC_ELSE() {
4885 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4886 } IEM_MC_ENDIF();
4887 IEM_MC_ADVANCE_RIP();
4888 IEM_MC_END();
4889 }
4890 return VINF_SUCCESS;
4891}
4892
4893
4894/** Opcode 0x0f 0x95. */
4895FNIEMOP_DEF(iemOp_setne_Eb)
4896{
4897 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4898 IEMOP_HLP_MIN_386();
4899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4900
4901 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4902 * any way. AMD says it's "unused", whatever that means. We're
4903 * ignoring it for now. */
4904 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4905 {
4906 /* register target */
4907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4908 IEM_MC_BEGIN(0, 0);
4909 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4910 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4911 } IEM_MC_ELSE() {
4912 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4913 } IEM_MC_ENDIF();
4914 IEM_MC_ADVANCE_RIP();
4915 IEM_MC_END();
4916 }
4917 else
4918 {
4919 /* memory target */
4920 IEM_MC_BEGIN(0, 1);
4921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4924 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4925 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4926 } IEM_MC_ELSE() {
4927 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4928 } IEM_MC_ENDIF();
4929 IEM_MC_ADVANCE_RIP();
4930 IEM_MC_END();
4931 }
4932 return VINF_SUCCESS;
4933}
4934
4935
4936/** Opcode 0x0f 0x96. */
4937FNIEMOP_DEF(iemOp_setbe_Eb)
4938{
4939 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4940 IEMOP_HLP_MIN_386();
4941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4942
4943 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4944 * any way. AMD says it's "unused", whatever that means. We're
4945 * ignoring it for now. */
4946 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4947 {
4948 /* register target */
4949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4950 IEM_MC_BEGIN(0, 0);
4951 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4952 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4953 } IEM_MC_ELSE() {
4954 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4955 } IEM_MC_ENDIF();
4956 IEM_MC_ADVANCE_RIP();
4957 IEM_MC_END();
4958 }
4959 else
4960 {
4961 /* memory target */
4962 IEM_MC_BEGIN(0, 1);
4963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4966 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4967 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4968 } IEM_MC_ELSE() {
4969 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4970 } IEM_MC_ENDIF();
4971 IEM_MC_ADVANCE_RIP();
4972 IEM_MC_END();
4973 }
4974 return VINF_SUCCESS;
4975}
4976
4977
4978/** Opcode 0x0f 0x97. */
4979FNIEMOP_DEF(iemOp_setnbe_Eb)
4980{
4981 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4982 IEMOP_HLP_MIN_386();
4983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4984
4985 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4986 * any way. AMD says it's "unused", whatever that means. We're
4987 * ignoring it for now. */
4988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4989 {
4990 /* register target */
4991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4992 IEM_MC_BEGIN(0, 0);
4993 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4994 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4995 } IEM_MC_ELSE() {
4996 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4997 } IEM_MC_ENDIF();
4998 IEM_MC_ADVANCE_RIP();
4999 IEM_MC_END();
5000 }
5001 else
5002 {
5003 /* memory target */
5004 IEM_MC_BEGIN(0, 1);
5005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5009 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5010 } IEM_MC_ELSE() {
5011 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5012 } IEM_MC_ENDIF();
5013 IEM_MC_ADVANCE_RIP();
5014 IEM_MC_END();
5015 }
5016 return VINF_SUCCESS;
5017}
5018
5019
5020/** Opcode 0x0f 0x98. */
5021FNIEMOP_DEF(iemOp_sets_Eb)
5022{
5023 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5024 IEMOP_HLP_MIN_386();
5025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5026
5027 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5028 * any way. AMD says it's "unused", whatever that means. We're
5029 * ignoring it for now. */
5030 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5031 {
5032 /* register target */
5033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5034 IEM_MC_BEGIN(0, 0);
5035 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5036 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5037 } IEM_MC_ELSE() {
5038 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5039 } IEM_MC_ENDIF();
5040 IEM_MC_ADVANCE_RIP();
5041 IEM_MC_END();
5042 }
5043 else
5044 {
5045 /* memory target */
5046 IEM_MC_BEGIN(0, 1);
5047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5050 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5051 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5052 } IEM_MC_ELSE() {
5053 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5054 } IEM_MC_ENDIF();
5055 IEM_MC_ADVANCE_RIP();
5056 IEM_MC_END();
5057 }
5058 return VINF_SUCCESS;
5059}
5060
5061
5062/** Opcode 0x0f 0x99. */
5063FNIEMOP_DEF(iemOp_setns_Eb)
5064{
5065 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5066 IEMOP_HLP_MIN_386();
5067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5068
5069 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5070 * any way. AMD says it's "unused", whatever that means. We're
5071 * ignoring it for now. */
5072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5073 {
5074 /* register target */
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5076 IEM_MC_BEGIN(0, 0);
5077 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5078 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5079 } IEM_MC_ELSE() {
5080 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5081 } IEM_MC_ENDIF();
5082 IEM_MC_ADVANCE_RIP();
5083 IEM_MC_END();
5084 }
5085 else
5086 {
5087 /* memory target */
5088 IEM_MC_BEGIN(0, 1);
5089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5092 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5093 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5094 } IEM_MC_ELSE() {
5095 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5096 } IEM_MC_ENDIF();
5097 IEM_MC_ADVANCE_RIP();
5098 IEM_MC_END();
5099 }
5100 return VINF_SUCCESS;
5101}
5102
5103
5104/** Opcode 0x0f 0x9a. */
5105FNIEMOP_DEF(iemOp_setp_Eb)
5106{
5107 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5108 IEMOP_HLP_MIN_386();
5109 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5110
5111 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5112 * any way. AMD says it's "unused", whatever that means. We're
5113 * ignoring it for now. */
5114 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5115 {
5116 /* register target */
5117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5118 IEM_MC_BEGIN(0, 0);
5119 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5120 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5121 } IEM_MC_ELSE() {
5122 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5123 } IEM_MC_ENDIF();
5124 IEM_MC_ADVANCE_RIP();
5125 IEM_MC_END();
5126 }
5127 else
5128 {
5129 /* memory target */
5130 IEM_MC_BEGIN(0, 1);
5131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5134 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5135 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5136 } IEM_MC_ELSE() {
5137 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5138 } IEM_MC_ENDIF();
5139 IEM_MC_ADVANCE_RIP();
5140 IEM_MC_END();
5141 }
5142 return VINF_SUCCESS;
5143}
5144
5145
5146/** Opcode 0x0f 0x9b. */
5147FNIEMOP_DEF(iemOp_setnp_Eb)
5148{
5149 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5150 IEMOP_HLP_MIN_386();
5151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5152
5153 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5154 * any way. AMD says it's "unused", whatever that means. We're
5155 * ignoring it for now. */
5156 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5157 {
5158 /* register target */
5159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5160 IEM_MC_BEGIN(0, 0);
5161 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5162 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5163 } IEM_MC_ELSE() {
5164 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5165 } IEM_MC_ENDIF();
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 }
5169 else
5170 {
5171 /* memory target */
5172 IEM_MC_BEGIN(0, 1);
5173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5176 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5177 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5178 } IEM_MC_ELSE() {
5179 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5180 } IEM_MC_ENDIF();
5181 IEM_MC_ADVANCE_RIP();
5182 IEM_MC_END();
5183 }
5184 return VINF_SUCCESS;
5185}
5186
5187
5188/** Opcode 0x0f 0x9c. */
5189FNIEMOP_DEF(iemOp_setl_Eb)
5190{
5191 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5192 IEMOP_HLP_MIN_386();
5193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5194
5195 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5196 * any way. AMD says it's "unused", whatever that means. We're
5197 * ignoring it for now. */
5198 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5199 {
5200 /* register target */
5201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5202 IEM_MC_BEGIN(0, 0);
5203 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5204 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5205 } IEM_MC_ELSE() {
5206 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5207 } IEM_MC_ENDIF();
5208 IEM_MC_ADVANCE_RIP();
5209 IEM_MC_END();
5210 }
5211 else
5212 {
5213 /* memory target */
5214 IEM_MC_BEGIN(0, 1);
5215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5218 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5219 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5220 } IEM_MC_ELSE() {
5221 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5222 } IEM_MC_ENDIF();
5223 IEM_MC_ADVANCE_RIP();
5224 IEM_MC_END();
5225 }
5226 return VINF_SUCCESS;
5227}
5228
5229
5230/** Opcode 0x0f 0x9d. */
5231FNIEMOP_DEF(iemOp_setnl_Eb)
5232{
5233 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5234 IEMOP_HLP_MIN_386();
5235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5236
5237 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5238 * any way. AMD says it's "unused", whatever that means. We're
5239 * ignoring it for now. */
5240 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5241 {
5242 /* register target */
5243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5244 IEM_MC_BEGIN(0, 0);
5245 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5246 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5247 } IEM_MC_ELSE() {
5248 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5249 } IEM_MC_ENDIF();
5250 IEM_MC_ADVANCE_RIP();
5251 IEM_MC_END();
5252 }
5253 else
5254 {
5255 /* memory target */
5256 IEM_MC_BEGIN(0, 1);
5257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5260 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5261 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5262 } IEM_MC_ELSE() {
5263 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5264 } IEM_MC_ENDIF();
5265 IEM_MC_ADVANCE_RIP();
5266 IEM_MC_END();
5267 }
5268 return VINF_SUCCESS;
5269}
5270
5271
5272/** Opcode 0x0f 0x9e. */
5273FNIEMOP_DEF(iemOp_setle_Eb)
5274{
5275 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5276 IEMOP_HLP_MIN_386();
5277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5278
5279 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5280 * any way. AMD says it's "unused", whatever that means. We're
5281 * ignoring it for now. */
5282 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5283 {
5284 /* register target */
5285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5286 IEM_MC_BEGIN(0, 0);
5287 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5288 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5289 } IEM_MC_ELSE() {
5290 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5291 } IEM_MC_ENDIF();
5292 IEM_MC_ADVANCE_RIP();
5293 IEM_MC_END();
5294 }
5295 else
5296 {
5297 /* memory target */
5298 IEM_MC_BEGIN(0, 1);
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5302 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5303 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5304 } IEM_MC_ELSE() {
5305 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5306 } IEM_MC_ENDIF();
5307 IEM_MC_ADVANCE_RIP();
5308 IEM_MC_END();
5309 }
5310 return VINF_SUCCESS;
5311}
5312
5313
5314/** Opcode 0x0f 0x9f. */
5315FNIEMOP_DEF(iemOp_setnle_Eb)
5316{
5317 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5318 IEMOP_HLP_MIN_386();
5319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5320
5321 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5322 * any way. AMD says it's "unused", whatever that means. We're
5323 * ignoring it for now. */
5324 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5325 {
5326 /* register target */
5327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5328 IEM_MC_BEGIN(0, 0);
5329 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5330 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5331 } IEM_MC_ELSE() {
5332 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5333 } IEM_MC_ENDIF();
5334 IEM_MC_ADVANCE_RIP();
5335 IEM_MC_END();
5336 }
5337 else
5338 {
5339 /* memory target */
5340 IEM_MC_BEGIN(0, 1);
5341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5344 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5345 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5346 } IEM_MC_ELSE() {
5347 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5348 } IEM_MC_ENDIF();
5349 IEM_MC_ADVANCE_RIP();
5350 IEM_MC_END();
5351 }
5352 return VINF_SUCCESS;
5353}
5354
5355
5356/**
5357 * Common 'push segment-register' helper.
5358 */
5359FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5360{
5361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5362 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* only fs/gs pushes are valid in 64-bit mode */
5363 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5364
5365 switch (pVCpu->iem.s.enmEffOpSize)
5366 {
5367 case IEMMODE_16BIT:
5368 IEM_MC_BEGIN(0, 1);
5369 IEM_MC_LOCAL(uint16_t, u16Value);
5370 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5371 IEM_MC_PUSH_U16(u16Value);
5372 IEM_MC_ADVANCE_RIP();
5373 IEM_MC_END();
5374 break;
5375
5376 case IEMMODE_32BIT:
5377 IEM_MC_BEGIN(0, 1);
5378 IEM_MC_LOCAL(uint32_t, u32Value);
5379 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5380 IEM_MC_PUSH_U32_SREG(u32Value);
5381 IEM_MC_ADVANCE_RIP();
5382 IEM_MC_END();
5383 break;
5384
5385 case IEMMODE_64BIT:
5386 IEM_MC_BEGIN(0, 1);
5387 IEM_MC_LOCAL(uint64_t, u64Value);
5388 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5389 IEM_MC_PUSH_U64(u64Value);
5390 IEM_MC_ADVANCE_RIP();
5391 IEM_MC_END();
5392 break;
5393 }
5394
5395 return VINF_SUCCESS;
5396}
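
/* Note: the 32-bit case deliberately uses IEM_MC_PUSH_U32_SREG instead of a
   plain 32-bit push; judging from the MC name, this exists so the
   implementation can mimic CPUs on which a 32-bit push of a segment register
   only writes the low 16 bits of the stack slot.  See the MC implementation
   for the authoritative behaviour. */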
5397
5398
5399/** Opcode 0x0f 0xa0. */
5400FNIEMOP_DEF(iemOp_push_fs)
5401{
5402 IEMOP_MNEMONIC(push_fs, "push fs");
5403 IEMOP_HLP_MIN_386();
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5406}
5407
5408
5409/** Opcode 0x0f 0xa1. */
5410FNIEMOP_DEF(iemOp_pop_fs)
5411{
5412 IEMOP_MNEMONIC(pop_fs, "pop fs");
5413 IEMOP_HLP_MIN_386();
5414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5415 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5416}
5417
5418
5419/** Opcode 0x0f 0xa2. */
5420FNIEMOP_DEF(iemOp_cpuid)
5421{
5422 IEMOP_MNEMONIC(cpuid, "cpuid");
5423 IEMOP_HLP_MIN_486(); /* not all 486es. */
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5426}
5427
5428
5429/**
5430 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5431 * iemOp_bts_Ev_Gv.
5432 */
5433FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5434{
5435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5436 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5437
5438 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5439 {
5440 /* register destination. */
5441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5442 switch (pVCpu->iem.s.enmEffOpSize)
5443 {
5444 case IEMMODE_16BIT:
5445 IEM_MC_BEGIN(3, 0);
5446 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5447 IEM_MC_ARG(uint16_t, u16Src, 1);
5448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5449
5450 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5451 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5452 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5453 IEM_MC_REF_EFLAGS(pEFlags);
5454 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5455
5456 IEM_MC_ADVANCE_RIP();
5457 IEM_MC_END();
5458 return VINF_SUCCESS;
5459
5460 case IEMMODE_32BIT:
5461 IEM_MC_BEGIN(3, 0);
5462 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5463 IEM_MC_ARG(uint32_t, u32Src, 1);
5464 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5465
5466 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5467 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5468 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5469 IEM_MC_REF_EFLAGS(pEFlags);
5470 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5471
5472 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5473 IEM_MC_ADVANCE_RIP();
5474 IEM_MC_END();
5475 return VINF_SUCCESS;
5476
5477 case IEMMODE_64BIT:
5478 IEM_MC_BEGIN(3, 0);
5479 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5480 IEM_MC_ARG(uint64_t, u64Src, 1);
5481 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5482
5483 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5484 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5485 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5486 IEM_MC_REF_EFLAGS(pEFlags);
5487 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5488
5489 IEM_MC_ADVANCE_RIP();
5490 IEM_MC_END();
5491 return VINF_SUCCESS;
5492
5493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5494 }
5495 }
5496 else
5497 {
5498 /* memory destination. */
5499
5500 uint32_t fAccess;
5501 if (pImpl->pfnLockedU16)
5502 fAccess = IEM_ACCESS_DATA_RW;
5503 else /* BT */
5504 fAccess = IEM_ACCESS_DATA_R;
5505
5506 /** @todo test negative bit offsets! */
5507 switch (pVCpu->iem.s.enmEffOpSize)
5508 {
5509 case IEMMODE_16BIT:
5510 IEM_MC_BEGIN(3, 2);
5511 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5512 IEM_MC_ARG(uint16_t, u16Src, 1);
5513 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5515 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5516
5517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5518 if (pImpl->pfnLockedU16)
5519 IEMOP_HLP_DONE_DECODING();
5520 else
5521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5522 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5523 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5524 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5525 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5526 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5527 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5528 IEM_MC_FETCH_EFLAGS(EFlags);
5529
5530 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5531 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5532 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5533 else
5534 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5535 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5536
5537 IEM_MC_COMMIT_EFLAGS(EFlags);
5538 IEM_MC_ADVANCE_RIP();
5539 IEM_MC_END();
5540 return VINF_SUCCESS;
5541
5542 case IEMMODE_32BIT:
5543 IEM_MC_BEGIN(3, 2);
5544 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5545 IEM_MC_ARG(uint32_t, u32Src, 1);
5546 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5548 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5549
5550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5551 if (pImpl->pfnLockedU16)
5552 IEMOP_HLP_DONE_DECODING();
5553 else
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5556 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5557 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5558 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5559 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5560 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5561 IEM_MC_FETCH_EFLAGS(EFlags);
5562
5563 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5564 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5565 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5566 else
5567 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5568 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5569
5570 IEM_MC_COMMIT_EFLAGS(EFlags);
5571 IEM_MC_ADVANCE_RIP();
5572 IEM_MC_END();
5573 return VINF_SUCCESS;
5574
5575 case IEMMODE_64BIT:
5576 IEM_MC_BEGIN(3, 2);
5577 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5578 IEM_MC_ARG(uint64_t, u64Src, 1);
5579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5581 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5582
5583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5584 if (pImpl->pfnLockedU16)
5585 IEMOP_HLP_DONE_DECODING();
5586 else
5587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5588 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5589 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5590 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5591 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5592 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5593 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5594 IEM_MC_FETCH_EFLAGS(EFlags);
5595
5596 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5597 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5598 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5599 else
5600 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5601 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5602
5603 IEM_MC_COMMIT_EFLAGS(EFlags);
5604 IEM_MC_ADVANCE_RIP();
5605 IEM_MC_END();
5606 return VINF_SUCCESS;
5607
5608 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5609 }
5610 }
5611}
5612
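/*
 * Illustrative sketch (not part of the decoder, kept out of the build): the
 * effective address adjustment the memory path above performs for
 * BT/BTS/BTR/BTC with a register bit offset.  The offset is signed and may
 * select a word/dword/qword below the base operand.  Hypothetical helper
 * for the 16-bit operand size case:
 */
#if 0 /* example only */
static RTGCPTR iemExampleBtEffAddr16(RTGCPTR GCPtrEff, uint16_t u16BitNo)
{
    int16_t i16Adj = (int16_t)u16BitNo;
    i16Adj >>= 4;               /* signed word index (IEM_MC_SAR_LOCAL_S16). */
    i16Adj <<= 1;               /* scale to bytes    (IEM_MC_SHL_LOCAL_S16). */
    return GCPtrEff + i16Adj;   /* the bit tested is then u16BitNo & 0x0f.   */
}
#endif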
5613
5614/** Opcode 0x0f 0xa3. */
5615FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5616{
5617 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5618 IEMOP_HLP_MIN_386();
5619 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5620}
5621
5622
5623/**
5624 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5625 */
5626FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5627{
5628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5629 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5630
5631 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5632 {
5633 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5635
5636 switch (pVCpu->iem.s.enmEffOpSize)
5637 {
5638 case IEMMODE_16BIT:
5639 IEM_MC_BEGIN(4, 0);
5640 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5641 IEM_MC_ARG(uint16_t, u16Src, 1);
5642 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5643 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5644
5645 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5646 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5647 IEM_MC_REF_EFLAGS(pEFlags);
5648 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5649
5650 IEM_MC_ADVANCE_RIP();
5651 IEM_MC_END();
5652 return VINF_SUCCESS;
5653
5654 case IEMMODE_32BIT:
5655 IEM_MC_BEGIN(4, 0);
5656 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5657 IEM_MC_ARG(uint32_t, u32Src, 1);
5658 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5659 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5660
5661 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5662 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5663 IEM_MC_REF_EFLAGS(pEFlags);
5664 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5665
5666 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5667 IEM_MC_ADVANCE_RIP();
5668 IEM_MC_END();
5669 return VINF_SUCCESS;
5670
5671 case IEMMODE_64BIT:
5672 IEM_MC_BEGIN(4, 0);
5673 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5674 IEM_MC_ARG(uint64_t, u64Src, 1);
5675 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5676 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5677
5678 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5679 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5680 IEM_MC_REF_EFLAGS(pEFlags);
5681 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5682
5683 IEM_MC_ADVANCE_RIP();
5684 IEM_MC_END();
5685 return VINF_SUCCESS;
5686
5687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5688 }
5689 }
5690 else
5691 {
5692 switch (pVCpu->iem.s.enmEffOpSize)
5693 {
5694 case IEMMODE_16BIT:
5695 IEM_MC_BEGIN(4, 2);
5696 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5697 IEM_MC_ARG(uint16_t, u16Src, 1);
5698 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5699 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5701
5702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5703 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5704 IEM_MC_ASSIGN(cShiftArg, cShift);
5705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5706 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5707 IEM_MC_FETCH_EFLAGS(EFlags);
5708 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5709 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5710
5711 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5712 IEM_MC_COMMIT_EFLAGS(EFlags);
5713 IEM_MC_ADVANCE_RIP();
5714 IEM_MC_END();
5715 return VINF_SUCCESS;
5716
5717 case IEMMODE_32BIT:
5718 IEM_MC_BEGIN(4, 2);
5719 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5720 IEM_MC_ARG(uint32_t, u32Src, 1);
5721 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5722 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5724
5725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5726 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5727 IEM_MC_ASSIGN(cShiftArg, cShift);
5728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5729 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5730 IEM_MC_FETCH_EFLAGS(EFlags);
5731 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5732 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5733
5734 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5735 IEM_MC_COMMIT_EFLAGS(EFlags);
5736 IEM_MC_ADVANCE_RIP();
5737 IEM_MC_END();
5738 return VINF_SUCCESS;
5739
5740 case IEMMODE_64BIT:
5741 IEM_MC_BEGIN(4, 2);
5742 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5743 IEM_MC_ARG(uint64_t, u64Src, 1);
5744 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5745 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5747
5748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5749 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5750 IEM_MC_ASSIGN(cShiftArg, cShift);
5751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5752 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5753 IEM_MC_FETCH_EFLAGS(EFlags);
5754 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5755 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5756
5757 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5758 IEM_MC_COMMIT_EFLAGS(EFlags);
5759 IEM_MC_ADVANCE_RIP();
5760 IEM_MC_END();
5761 return VINF_SUCCESS;
5762
5763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5764 }
5765 }
5766}
5767
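/*
 * Illustrative sketch (not part of the decoder, kept out of the build): what
 * the pfnNormalU16 worker invoked above computes for SHLD with an immediate
 * count.  A hedged plain-C model that ignores the EFLAGS updates and leaves
 * the architecturally undefined count > 16 case as a placeholder:
 */
#if 0 /* example only */
static uint16_t iemExampleShldU16(uint16_t uDst, uint16_t uSrc, uint8_t cShift)
{
    cShift &= 31;                   /* the CPU masks the count modulo 32. */
    if (!cShift)
        return uDst;                /* count 0: no change, no flag update. */
    if (cShift > 16)
        return 0;                   /* undefined for 16-bit operands; placeholder. */
    return (uint16_t)(((uint32_t)uDst << cShift) | (uSrc >> (16 - cShift)));
}
#endif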
5768
5769/**
5770 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5771 */
5772FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5773{
5774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5775 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5776
5777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5778 {
5779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5780
5781 switch (pVCpu->iem.s.enmEffOpSize)
5782 {
5783 case IEMMODE_16BIT:
5784 IEM_MC_BEGIN(4, 0);
5785 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5786 IEM_MC_ARG(uint16_t, u16Src, 1);
5787 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5788 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5789
5790 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5791 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5792 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5793 IEM_MC_REF_EFLAGS(pEFlags);
5794 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5795
5796 IEM_MC_ADVANCE_RIP();
5797 IEM_MC_END();
5798 return VINF_SUCCESS;
5799
5800 case IEMMODE_32BIT:
5801 IEM_MC_BEGIN(4, 0);
5802 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5803 IEM_MC_ARG(uint32_t, u32Src, 1);
5804 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5805 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5806
5807 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5808 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5809 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5810 IEM_MC_REF_EFLAGS(pEFlags);
5811 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5812
5813 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5814 IEM_MC_ADVANCE_RIP();
5815 IEM_MC_END();
5816 return VINF_SUCCESS;
5817
5818 case IEMMODE_64BIT:
5819 IEM_MC_BEGIN(4, 0);
5820 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5821 IEM_MC_ARG(uint64_t, u64Src, 1);
5822 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5823 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5824
5825 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5826 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5827 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5828 IEM_MC_REF_EFLAGS(pEFlags);
5829 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5830
5831 IEM_MC_ADVANCE_RIP();
5832 IEM_MC_END();
5833 return VINF_SUCCESS;
5834
5835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5836 }
5837 }
5838 else
5839 {
5840 switch (pVCpu->iem.s.enmEffOpSize)
5841 {
5842 case IEMMODE_16BIT:
5843 IEM_MC_BEGIN(4, 2);
5844 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5845 IEM_MC_ARG(uint16_t, u16Src, 1);
5846 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5847 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5849
5850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5852 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5853 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5854 IEM_MC_FETCH_EFLAGS(EFlags);
5855 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5856 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5857
5858 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5859 IEM_MC_COMMIT_EFLAGS(EFlags);
5860 IEM_MC_ADVANCE_RIP();
5861 IEM_MC_END();
5862 return VINF_SUCCESS;
5863
5864 case IEMMODE_32BIT:
5865 IEM_MC_BEGIN(4, 2);
5866 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5867 IEM_MC_ARG(uint32_t, u32Src, 1);
5868 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5869 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5871
5872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5874 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5875 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5876 IEM_MC_FETCH_EFLAGS(EFlags);
5877 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5878 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5879
5880 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5881 IEM_MC_COMMIT_EFLAGS(EFlags);
5882 IEM_MC_ADVANCE_RIP();
5883 IEM_MC_END();
5884 return VINF_SUCCESS;
5885
5886 case IEMMODE_64BIT:
5887 IEM_MC_BEGIN(4, 2);
5888 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5889 IEM_MC_ARG(uint64_t, u64Src, 1);
5890 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5891 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5893
5894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5896 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5897 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5898 IEM_MC_FETCH_EFLAGS(EFlags);
5899 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5900 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5901
5902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5903 IEM_MC_COMMIT_EFLAGS(EFlags);
5904 IEM_MC_ADVANCE_RIP();
5905 IEM_MC_END();
5906 return VINF_SUCCESS;
5907
5908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5909 }
5910 }
5911}
5912
5913
5914
5915/** Opcode 0x0f 0xa4. */
5916FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5917{
5918 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5919 IEMOP_HLP_MIN_386();
5920 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5921}
5922
5923
5924/** Opcode 0x0f 0xa5. */
5925FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5926{
5927 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5928 IEMOP_HLP_MIN_386();
5929 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5930}
5931
5932
5933/** Opcode 0x0f 0xa8. */
5934FNIEMOP_DEF(iemOp_push_gs)
5935{
5936 IEMOP_MNEMONIC(push_gs, "push gs");
5937 IEMOP_HLP_MIN_386();
5938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5939 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5940}
5941
5942
5943/** Opcode 0x0f 0xa9. */
5944FNIEMOP_DEF(iemOp_pop_gs)
5945{
5946 IEMOP_MNEMONIC(pop_gs, "pop gs");
5947 IEMOP_HLP_MIN_386();
5948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5949 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5950}
5951
5952
5953/** Opcode 0x0f 0xaa. */
5954FNIEMOP_DEF(iemOp_rsm)
5955{
5956 IEMOP_MNEMONIC(rsm, "rsm");
5957 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
5958 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
5959 * intercept). */
5960 IEMOP_BITCH_ABOUT_STUB();
5961 return IEMOP_RAISE_INVALID_OPCODE();
5962}
5963
5964//IEMOP_HLP_MIN_386();
5965
5966
5967/** Opcode 0x0f 0xab. */
5968FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5969{
5970 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5971 IEMOP_HLP_MIN_386();
5972 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5973}
5974
5975
5976/** Opcode 0x0f 0xac. */
5977FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5978{
5979 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5980 IEMOP_HLP_MIN_386();
5981 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5982}
5983
5984
5985/** Opcode 0x0f 0xad. */
5986FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5987{
5988 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5989 IEMOP_HLP_MIN_386();
5990 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5991}
5992
5993
5994/** Opcode 0x0f 0xae mem/0. */
5995FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5996{
5997 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5998 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5999 return IEMOP_RAISE_INVALID_OPCODE();
6000
6001 IEM_MC_BEGIN(3, 1);
6002 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6003 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6004 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6007 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6008 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6009 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6010 IEM_MC_END();
6011 return VINF_SUCCESS;
6012}
6013
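/*
 * Note (assumption, kept out of the build): FXSAVE/FXRSTOR operate on a
 * 512-byte area that must be 16-byte aligned, so iemCImpl_fxsave is expected
 * to raise #GP(0) for a misaligned operand.  A sketch of that check:
 */
#if 0 /* example only */
static bool iemExampleFxSaveAreaOk(RTGCPTR GCPtrEff)
{
    return !(GCPtrEff & 15);    /* misaligned m512 operand => #GP(0) */
}
#endif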
6014
6015/** Opcode 0x0f 0xae mem/1. */
6016FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6017{
6018 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6019 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6020 return IEMOP_RAISE_INVALID_OPCODE();
6021
6022 IEM_MC_BEGIN(3, 1);
6023 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6024 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6025 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6029 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6030 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6031 IEM_MC_END();
6032 return VINF_SUCCESS;
6033}
6034
6035
6036/**
6037 * @opmaps grp15
6038 * @opcode !11/2
6039 * @oppfx none
6040 * @opcpuid sse
6041 * @opgroup og_sse_mxcsrsm
6042 * @opxcpttype 5
6043 * @optest op1=0 -> mxcsr=0
6044 * @optest op1=0x2083 -> mxcsr=0x2083
6045 * @optest op1=0xfffffffe -> value.xcpt=0xd
6046 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6047 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6048 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6049 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6050 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6051 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6052 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6053 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6054 */
6055FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6056{
6057 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6058 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6059 return IEMOP_RAISE_INVALID_OPCODE();
6060
6061 IEM_MC_BEGIN(2, 0);
6062 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6063 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6066 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6067 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6068 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6069 IEM_MC_END();
6070 return VINF_SUCCESS;
6071}
6072
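/*
 * Note (assumption, kept out of the build): iemCImpl_ldmxcsr is expected to
 * fault if the guest sets MXCSR bits outside the supported mask, matching
 * the value.xcpt=0xd test above.  A sketch of that validity check, with
 * fMxCsrMask standing in for the mask reported by FXSAVE:
 */
#if 0 /* example only */
static bool iemExampleMxCsrValid(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
{
    return !(uNewMxCsr & ~fMxCsrMask); /* any reserved bit set => #GP(0) */
}
#endif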
6073
6074/**
6075 * @opmaps grp15
6076 * @opcode !11/3
6077 * @oppfx none
6078 * @opcpuid sse
6079 * @opgroup og_sse_mxcsrsm
6080 * @opxcpttype 5
6081 * @optest mxcsr=0 -> op1=0
6082 * @optest mxcsr=0x2083 -> op1=0x2083
6083 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6084 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6085 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6086 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6087 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6088 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6089 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6090 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6091 */
6092FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6093{
6094 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6095 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6096 return IEMOP_RAISE_INVALID_OPCODE();
6097
6098 IEM_MC_BEGIN(2, 0);
6099 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6100 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6104 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6105 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6106 IEM_MC_END();
6107 return VINF_SUCCESS;
6108}
6109
6110
6111/**
6112 * @opmaps grp15
6113 * @opcode !11/4
6114 * @oppfx none
6115 * @opcpuid xsave
6116 * @opgroup og_system
6117 * @opxcpttype none
6118 */
6119FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6120{
6121 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0);
6122 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6123 return IEMOP_RAISE_INVALID_OPCODE();
6124
6125 IEM_MC_BEGIN(3, 0);
6126 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6127 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6128 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6131 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6132 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6133 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6134 IEM_MC_END();
6135 return VINF_SUCCESS;
6136}
6137
6138
6139/**
6140 * @opmaps grp15
6141 * @opcode !11/5
6142 * @oppfx none
6143 * @opcpuid xsave
6144 * @opgroup og_system
6145 * @opxcpttype none
6146 */
6147FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6148{
6149 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0);
6150 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6151 return IEMOP_RAISE_INVALID_OPCODE();
6152
6153 IEM_MC_BEGIN(3, 0);
6154 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6155 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6156 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6159 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6160 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6161 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6162 IEM_MC_END();
6163 return VINF_SUCCESS;
6164}
6165
6166/** Opcode 0x0f 0xae mem/6. */
6167FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6168
6169/**
6170 * @opmaps grp15
6171 * @opcode !11/7
6172 * @oppfx none
6173 * @opcpuid clfsh
6174 * @opgroup og_cachectl
6175 * @optest op1=1 ->
6176 */
6177FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6178{
6179 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6180 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6181 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6182
6183 IEM_MC_BEGIN(2, 0);
6184 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6185 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6188 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6189 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6190 IEM_MC_END();
6191 return VINF_SUCCESS;
6192}
6193
6194/**
6195 * @opmaps grp15
6196 * @opcode !11/7
6197 * @oppfx 0x66
6198 * @opcpuid clflushopt
6199 * @opgroup og_cachectl
6200 * @optest op1=1 ->
6201 */
6202FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6203{
6204 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
6205 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6206 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6207
6208 IEM_MC_BEGIN(2, 0);
6209 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6210 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6213 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6214 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6215 IEM_MC_END();
6216 return VINF_SUCCESS;
6217}
6218
6219
6220/** Opcode 0x0f 0xae 11b/5. */
6221FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6222{
6223 RT_NOREF_PV(bRm);
6224 IEMOP_MNEMONIC(lfence, "lfence");
6225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6226 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6227 return IEMOP_RAISE_INVALID_OPCODE();
6228
6229 IEM_MC_BEGIN(0, 0);
6230 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6231 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6232 else
6233 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6234 IEM_MC_ADVANCE_RIP();
6235 IEM_MC_END();
6236 return VINF_SUCCESS;
6237}
6238
6239
6240/** Opcode 0x0f 0xae 11b/6. */
6241FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6242{
6243 RT_NOREF_PV(bRm);
6244 IEMOP_MNEMONIC(mfence, "mfence");
6245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6246 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6247 return IEMOP_RAISE_INVALID_OPCODE();
6248
6249 IEM_MC_BEGIN(0, 0);
6250 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6251 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6252 else
6253 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6254 IEM_MC_ADVANCE_RIP();
6255 IEM_MC_END();
6256 return VINF_SUCCESS;
6257}
6258
6259
6260/** Opcode 0x0f 0xae 11b/7. */
6261FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6262{
6263 RT_NOREF_PV(bRm);
6264 IEMOP_MNEMONIC(sfence, "sfence");
6265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6266 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6267 return IEMOP_RAISE_INVALID_OPCODE();
6268
6269 IEM_MC_BEGIN(0, 0);
6270 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6271 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6272 else
6273 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6274 IEM_MC_ADVANCE_RIP();
6275 IEM_MC_END();
6276 return VINF_SUCCESS;
6277}
6278
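/*
 * Illustrative sketch (assumption, kept out of the build): on hosts without
 * SSE2 the three fences above fall back to iemAImpl_alt_mem_fence.  A locked
 * read-modify-write is the classic way to get a full memory fence on such
 * CPUs; the real worker may differ in detail:
 */
#if 0 /* example only */
static void iemExampleAltMemFence(void)
{
    int32_t volatile iDummy = 0;
    ASMAtomicXchgS32(&iDummy, 1);   /* a locked xchg serializes memory accesses */
}
#endif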
6279
6280/** Opcode 0xf3 0x0f 0xae 11b/0. */
6281FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6282
6283/** Opcode 0xf3 0x0f 0xae 11b/1. */
6284FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6285
6286/** Opcode 0xf3 0x0f 0xae 11b/2. */
6287FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6288
6289/** Opcode 0xf3 0x0f 0xae 11b/3. */
6290FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6291
6292
6293/**
6294 * Group 15 jump table for register variant.
6295 */
6296IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6297{ /* pfx: none, 066h, 0f3h, 0f2h */
6298 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6299 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6300 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6301 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6302 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6303 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6304 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6305 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6306};
6307AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6308
6309
6310/**
6311 * Group 15 jump table for memory variant.
6312 */
6313IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6314{ /* pfx: none, 066h, 0f3h, 0f2h */
6315 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6316 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6317 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6318 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6319 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6320 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6321 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6322 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6323};
6324AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6325
6326
6327/** Opcode 0x0f 0xae. */
6328FNIEMOP_DEF(iemOp_Grp15)
6329{
6330 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful when debugging 286 code. */
6331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6332 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6333 /* register, register */
6334 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6335 + pVCpu->iem.s.idxPrefix], bRm);
6336 /* memory, register */
6337 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6338 + pVCpu->iem.s.idxPrefix], bRm);
6339}
6340
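/*
 * Illustrative sketch (not part of the decoder, kept out of the build): how
 * the group 15 dispatch above indexes its jump tables -- four prefix columns
 * (none, 066h, 0f3h, 0f2h) per modr/m.reg row:
 */
#if 0 /* example only */
static unsigned iemExampleGrp15Index(uint8_t bRm, uint8_t idxPrefix)
{
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
    return iReg * 4 + idxPrefix;    /* 0..31, matching the 8*4 table size */
}
#endif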
6341
6342/** Opcode 0x0f 0xaf. */
6343FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6344{
6345 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6346 IEMOP_HLP_MIN_386();
6347 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6348 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6349}
6350
6351
6352/** Opcode 0x0f 0xb0. */
6353FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6354{
6355 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6356 IEMOP_HLP_MIN_486();
6357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6358
6359 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6360 {
6361 IEMOP_HLP_DONE_DECODING();
6362 IEM_MC_BEGIN(4, 0);
6363 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6364 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6365 IEM_MC_ARG(uint8_t, u8Src, 2);
6366 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6367
6368 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6369 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6370 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6371 IEM_MC_REF_EFLAGS(pEFlags);
6372 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6373 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6374 else
6375 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6376
6377 IEM_MC_ADVANCE_RIP();
6378 IEM_MC_END();
6379 }
6380 else
6381 {
6382 IEM_MC_BEGIN(4, 3);
6383 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6384 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6385 IEM_MC_ARG(uint8_t, u8Src, 2);
6386 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6388 IEM_MC_LOCAL(uint8_t, u8Al);
6389
6390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6391 IEMOP_HLP_DONE_DECODING();
6392 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6393 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6394 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6395 IEM_MC_FETCH_EFLAGS(EFlags);
6396 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6397 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6398 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6399 else
6400 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6401
6402 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6403 IEM_MC_COMMIT_EFLAGS(EFlags);
6404 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6405 IEM_MC_ADVANCE_RIP();
6406 IEM_MC_END();
6407 }
6408 return VINF_SUCCESS;
6409}
6410
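/*
 * Illustrative sketch (not part of the decoder, kept out of the build): the
 * compare-and-exchange semantics the iemAImpl_cmpxchg_u8 worker implements,
 * showing only the ZF part of the flag updates:
 */
#if 0 /* example only */
static void iemExampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puAl == *puDst)
    {
        *puDst     = uSrc;          /* equal: store the source operand */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *puAl      = *puDst;        /* not equal: load the accumulator from memory */
        *pfEFlags &= ~X86_EFL_ZF;
    }
    /* CF/PF/AF/SF/OF are set as for a CMP of the original operands (omitted). */
}
#endif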
6411/** Opcode 0x0f 0xb1. */
6412FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6413{
6414 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6415 IEMOP_HLP_MIN_486();
6416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6417
6418 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6419 {
6420 IEMOP_HLP_DONE_DECODING();
6421 switch (pVCpu->iem.s.enmEffOpSize)
6422 {
6423 case IEMMODE_16BIT:
6424 IEM_MC_BEGIN(4, 0);
6425 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6426 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6427 IEM_MC_ARG(uint16_t, u16Src, 2);
6428 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6429
6430 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6431 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6432 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6433 IEM_MC_REF_EFLAGS(pEFlags);
6434 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6435 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6436 else
6437 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6438
6439 IEM_MC_ADVANCE_RIP();
6440 IEM_MC_END();
6441 return VINF_SUCCESS;
6442
6443 case IEMMODE_32BIT:
6444 IEM_MC_BEGIN(4, 0);
6445 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6446 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6447 IEM_MC_ARG(uint32_t, u32Src, 2);
6448 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6449
6450 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6451 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6452 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6453 IEM_MC_REF_EFLAGS(pEFlags);
6454 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6455 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6456 else
6457 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6458
6459 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6460 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6461 IEM_MC_ADVANCE_RIP();
6462 IEM_MC_END();
6463 return VINF_SUCCESS;
6464
6465 case IEMMODE_64BIT:
6466 IEM_MC_BEGIN(4, 0);
6467 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6468 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6469#ifdef RT_ARCH_X86
6470 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6471#else
6472 IEM_MC_ARG(uint64_t, u64Src, 2);
6473#endif
6474 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6475
6476 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6477 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6478 IEM_MC_REF_EFLAGS(pEFlags);
6479#ifdef RT_ARCH_X86
6480 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6481 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6482 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6483 else
6484 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6485#else
6486 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6487 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6488 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6489 else
6490 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6491#endif
6492
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 return VINF_SUCCESS;
6496
6497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6498 }
6499 }
6500 else
6501 {
6502 switch (pVCpu->iem.s.enmEffOpSize)
6503 {
6504 case IEMMODE_16BIT:
6505 IEM_MC_BEGIN(4, 3);
6506 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6507 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6508 IEM_MC_ARG(uint16_t, u16Src, 2);
6509 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6511 IEM_MC_LOCAL(uint16_t, u16Ax);
6512
6513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6514 IEMOP_HLP_DONE_DECODING();
6515 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6516 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6517 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6518 IEM_MC_FETCH_EFLAGS(EFlags);
6519 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6520 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6521 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6522 else
6523 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6524
6525 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6526 IEM_MC_COMMIT_EFLAGS(EFlags);
6527 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6528 IEM_MC_ADVANCE_RIP();
6529 IEM_MC_END();
6530 return VINF_SUCCESS;
6531
6532 case IEMMODE_32BIT:
6533 IEM_MC_BEGIN(4, 3);
6534 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6535 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6536 IEM_MC_ARG(uint32_t, u32Src, 2);
6537 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6539 IEM_MC_LOCAL(uint32_t, u32Eax);
6540
6541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6542 IEMOP_HLP_DONE_DECODING();
6543 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6544 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6545 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6546 IEM_MC_FETCH_EFLAGS(EFlags);
6547 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6548 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6549 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6550 else
6551 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6552
6553 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6554 IEM_MC_COMMIT_EFLAGS(EFlags);
6555 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6556 IEM_MC_ADVANCE_RIP();
6557 IEM_MC_END();
6558 return VINF_SUCCESS;
6559
6560 case IEMMODE_64BIT:
6561 IEM_MC_BEGIN(4, 3);
6562 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6563 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6564#ifdef RT_ARCH_X86
6565 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6566#else
6567 IEM_MC_ARG(uint64_t, u64Src, 2);
6568#endif
6569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6571 IEM_MC_LOCAL(uint64_t, u64Rax);
6572
6573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6574 IEMOP_HLP_DONE_DECODING();
6575 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6576 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6577 IEM_MC_FETCH_EFLAGS(EFlags);
6578 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6579#ifdef RT_ARCH_X86
6580 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6581 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6582 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6583 else
6584 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6585#else
6586 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6587 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6588 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6589 else
6590 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6591#endif
6592
6593 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6594 IEM_MC_COMMIT_EFLAGS(EFlags);
6595 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6596 IEM_MC_ADVANCE_RIP();
6597 IEM_MC_END();
6598 return VINF_SUCCESS;
6599
6600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6601 }
6602 }
6603}
6604
6605
6606FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6607{
6608 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6609 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6610
6611 switch (pVCpu->iem.s.enmEffOpSize)
6612 {
6613 case IEMMODE_16BIT:
6614 IEM_MC_BEGIN(5, 1);
6615 IEM_MC_ARG(uint16_t, uSel, 0);
6616 IEM_MC_ARG(uint16_t, offSeg, 1);
6617 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6618 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6619 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6620 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6624 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6625 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6626 IEM_MC_END();
6627 return VINF_SUCCESS;
6628
6629 case IEMMODE_32BIT:
6630 IEM_MC_BEGIN(5, 1);
6631 IEM_MC_ARG(uint16_t, uSel, 0);
6632 IEM_MC_ARG(uint32_t, offSeg, 1);
6633 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6634 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6635 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6636 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6639 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6640 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6641 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6642 IEM_MC_END();
6643 return VINF_SUCCESS;
6644
6645 case IEMMODE_64BIT:
6646 IEM_MC_BEGIN(5, 1);
6647 IEM_MC_ARG(uint16_t, uSel, 0);
6648 IEM_MC_ARG(uint64_t, offSeg, 1);
6649 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6650 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6651 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6652 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6655 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
6656 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6657 else
6658 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6659 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6660 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6661 IEM_MC_END();
6662 return VINF_SUCCESS;
6663
6664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6665 }
6666}
6667
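/*
 * Illustrative sketch (not part of the decoder, kept out of the build): the
 * far pointer layout decoded above for LSS/LFS/LGS -- the offset comes
 * first, with the 16-bit selector immediately after it (displacement 2, 4
 * or 8 depending on the operand size).  The 32-bit case, where the natural
 * layout has no padding between the members:
 */
#if 0 /* example only */
struct IEMEXAMPLEFARPTR32
{
    uint32_t offSeg;    /* bytes 0..3: offset, loaded into the general register */
    uint16_t uSel;      /* bytes 4..5: selector, loaded into the segment register */
};
#endif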
6668
6669/** Opcode 0x0f 0xb2. */
6670FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6671{
6672 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6673 IEMOP_HLP_MIN_386();
6674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6675 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6676 return IEMOP_RAISE_INVALID_OPCODE();
6677 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6678}
6679
6680
6681/** Opcode 0x0f 0xb3. */
6682FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6683{
6684 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6685 IEMOP_HLP_MIN_386();
6686 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6687}
6688
6689
6690/** Opcode 0x0f 0xb4. */
6691FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6692{
6693 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6694 IEMOP_HLP_MIN_386();
6695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6696 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6697 return IEMOP_RAISE_INVALID_OPCODE();
6698 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6699}
6700
6701
6702/** Opcode 0x0f 0xb5. */
6703FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6704{
6705 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6706 IEMOP_HLP_MIN_386();
6707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6708 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6709 return IEMOP_RAISE_INVALID_OPCODE();
6710 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6711}
6712
6713
6714/** Opcode 0x0f 0xb6. */
6715FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6716{
6717 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6718 IEMOP_HLP_MIN_386();
6719
6720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6721
6722 /*
6723 * If rm is denoting a register, no more instruction bytes.
6724 */
6725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6726 {
6727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6728 switch (pVCpu->iem.s.enmEffOpSize)
6729 {
6730 case IEMMODE_16BIT:
6731 IEM_MC_BEGIN(0, 1);
6732 IEM_MC_LOCAL(uint16_t, u16Value);
6733 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6734 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6735 IEM_MC_ADVANCE_RIP();
6736 IEM_MC_END();
6737 return VINF_SUCCESS;
6738
6739 case IEMMODE_32BIT:
6740 IEM_MC_BEGIN(0, 1);
6741 IEM_MC_LOCAL(uint32_t, u32Value);
6742 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6743 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6744 IEM_MC_ADVANCE_RIP();
6745 IEM_MC_END();
6746 return VINF_SUCCESS;
6747
6748 case IEMMODE_64BIT:
6749 IEM_MC_BEGIN(0, 1);
6750 IEM_MC_LOCAL(uint64_t, u64Value);
6751 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6752 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6753 IEM_MC_ADVANCE_RIP();
6754 IEM_MC_END();
6755 return VINF_SUCCESS;
6756
6757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6758 }
6759 }
6760 else
6761 {
6762 /*
6763 * We're loading a register from memory.
6764 */
6765 switch (pVCpu->iem.s.enmEffOpSize)
6766 {
6767 case IEMMODE_16BIT:
6768 IEM_MC_BEGIN(0, 2);
6769 IEM_MC_LOCAL(uint16_t, u16Value);
6770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6773 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6774 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6775 IEM_MC_ADVANCE_RIP();
6776 IEM_MC_END();
6777 return VINF_SUCCESS;
6778
6779 case IEMMODE_32BIT:
6780 IEM_MC_BEGIN(0, 2);
6781 IEM_MC_LOCAL(uint32_t, u32Value);
6782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6785 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6786 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6787 IEM_MC_ADVANCE_RIP();
6788 IEM_MC_END();
6789 return VINF_SUCCESS;
6790
6791 case IEMMODE_64BIT:
6792 IEM_MC_BEGIN(0, 2);
6793 IEM_MC_LOCAL(uint64_t, u64Value);
6794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6797 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6798 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6799 IEM_MC_ADVANCE_RIP();
6800 IEM_MC_END();
6801 return VINF_SUCCESS;
6802
6803 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6804 }
6805 }
6806}
6807
6808
6809/** Opcode 0x0f 0xb7. */
6810FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6811{
6812 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6813 IEMOP_HLP_MIN_386();
6814
6815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6816
6817 /** @todo Not entirely sure how the operand size prefix is handled here,
6818 * assuming that it will be ignored. Would be nice to have a few
6819 * tests for this. */
6820 /*
6821 * If rm is denoting a register, no more instruction bytes.
6822 */
6823 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6824 {
6825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6826 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6827 {
6828 IEM_MC_BEGIN(0, 1);
6829 IEM_MC_LOCAL(uint32_t, u32Value);
6830 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6831 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6832 IEM_MC_ADVANCE_RIP();
6833 IEM_MC_END();
6834 }
6835 else
6836 {
6837 IEM_MC_BEGIN(0, 1);
6838 IEM_MC_LOCAL(uint64_t, u64Value);
6839 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6840 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6841 IEM_MC_ADVANCE_RIP();
6842 IEM_MC_END();
6843 }
6844 }
6845 else
6846 {
6847 /*
6848 * We're loading a register from memory.
6849 */
6850 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6851 {
6852 IEM_MC_BEGIN(0, 2);
6853 IEM_MC_LOCAL(uint32_t, u32Value);
6854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6857 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6858 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6859 IEM_MC_ADVANCE_RIP();
6860 IEM_MC_END();
6861 }
6862 else
6863 {
6864 IEM_MC_BEGIN(0, 2);
6865 IEM_MC_LOCAL(uint64_t, u64Value);
6866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6869 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6870 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6871 IEM_MC_ADVANCE_RIP();
6872 IEM_MC_END();
6873 }
6874 }
6875 return VINF_SUCCESS;
6876}
6877
6878
6879/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6880FNIEMOP_UD_STUB(iemOp_jmpe);
6881/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6882FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6883
6884
6885/**
6886 * @opcode 0xb9
6887 * @opinvalid intel-modrm
6888 * @optest ->
6889 */
6890FNIEMOP_DEF(iemOp_Grp10)
6891{
6892 /*
6893 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
6894 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
6895 */
6896 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
6897 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */
6898 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
6899}
6900
6901
6902/** Opcode 0x0f 0xba. */
6903FNIEMOP_DEF(iemOp_Grp8)
6904{
6905 IEMOP_HLP_MIN_386();
6906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6907 PCIEMOPBINSIZES pImpl;
6908 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6909 {
6910 case 0: case 1: case 2: case 3:
6911 /* Both AMD and Intel want full modr/m decoding and imm8. */
6912 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
6913 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6914 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6915 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6916 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6918 }
6919 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6920
6921 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6922 {
6923 /* register destination. */
6924 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6926
6927 switch (pVCpu->iem.s.enmEffOpSize)
6928 {
6929 case IEMMODE_16BIT:
6930 IEM_MC_BEGIN(3, 0);
6931 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6932 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6933 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6934
6935 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6936 IEM_MC_REF_EFLAGS(pEFlags);
6937 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6938
6939 IEM_MC_ADVANCE_RIP();
6940 IEM_MC_END();
6941 return VINF_SUCCESS;
6942
6943 case IEMMODE_32BIT:
6944 IEM_MC_BEGIN(3, 0);
6945 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6946 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6947 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6948
6949 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6950 IEM_MC_REF_EFLAGS(pEFlags);
6951 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6952
6953 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6954 IEM_MC_ADVANCE_RIP();
6955 IEM_MC_END();
6956 return VINF_SUCCESS;
6957
6958 case IEMMODE_64BIT:
6959 IEM_MC_BEGIN(3, 0);
6960 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6961 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6962 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6963
6964 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6965 IEM_MC_REF_EFLAGS(pEFlags);
6966 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6967
6968 IEM_MC_ADVANCE_RIP();
6969 IEM_MC_END();
6970 return VINF_SUCCESS;
6971
6972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6973 }
6974 }
6975 else
6976 {
6977 /* memory destination. */
6978
6979 uint32_t fAccess;
6980 if (pImpl->pfnLockedU16)
6981 fAccess = IEM_ACCESS_DATA_RW;
6982 else /* BT */
6983 fAccess = IEM_ACCESS_DATA_R;
6984
6985 /** @todo test negative bit offsets! */
6986 switch (pVCpu->iem.s.enmEffOpSize)
6987 {
6988 case IEMMODE_16BIT:
6989 IEM_MC_BEGIN(3, 1);
6990 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6991 IEM_MC_ARG(uint16_t, u16Src, 1);
6992 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6994
6995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6996 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6997 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6998 if (pImpl->pfnLockedU16)
6999 IEMOP_HLP_DONE_DECODING();
7000 else
7001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7002 IEM_MC_FETCH_EFLAGS(EFlags);
7003 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7004 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7006 else
7007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7009
7010 IEM_MC_COMMIT_EFLAGS(EFlags);
7011 IEM_MC_ADVANCE_RIP();
7012 IEM_MC_END();
7013 return VINF_SUCCESS;
7014
7015 case IEMMODE_32BIT:
7016 IEM_MC_BEGIN(3, 1);
7017 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7018 IEM_MC_ARG(uint32_t, u32Src, 1);
7019 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7021
7022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7023 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7024 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7025 if (pImpl->pfnLockedU16)
7026 IEMOP_HLP_DONE_DECODING();
7027 else
7028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7029 IEM_MC_FETCH_EFLAGS(EFlags);
7030 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7031 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7032 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7033 else
7034 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7035 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7036
7037 IEM_MC_COMMIT_EFLAGS(EFlags);
7038 IEM_MC_ADVANCE_RIP();
7039 IEM_MC_END();
7040 return VINF_SUCCESS;
7041
7042 case IEMMODE_64BIT:
7043 IEM_MC_BEGIN(3, 1);
7044 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7045 IEM_MC_ARG(uint64_t, u64Src, 1);
7046 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7048
7049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7050 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7051 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7052 if (pImpl->pfnLockedU16)
7053 IEMOP_HLP_DONE_DECODING();
7054 else
7055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7056 IEM_MC_FETCH_EFLAGS(EFlags);
7057 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7058 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7059 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7060 else
7061 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7062 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7063
7064 IEM_MC_COMMIT_EFLAGS(EFlags);
7065 IEM_MC_ADVANCE_RIP();
7066 IEM_MC_END();
7067 return VINF_SUCCESS;
7068
7069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7070 }
7071 }
7072}
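/* Worked example of the imm8 masking above: "bt ax, 20" keeps only bits 3:0
   of the immediate for a 16-bit operand, so it actually tests bit
   20 & 15 = 4 of AX; the 32-bit and 64-bit forms mask with 0x1f and 0x3f
   respectively. Unlike the bt Ev,Gv register-offset form, the immediate
   form can therefore never address bits outside the operand itself. */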
7073
7074
7075/** Opcode 0x0f 0xbb. */
7076FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7077{
7078 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7079 IEMOP_HLP_MIN_386();
7080 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7081}
7082
7083
7084/** Opcode 0x0f 0xbc. */
7085FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7086{
7087 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7088 IEMOP_HLP_MIN_386();
7089 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7090 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7091}
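/* E.g. "bsf eax, ebx" with EBX=00000008h yields EAX=3 and ZF=0, while
   EBX=0 sets ZF and leaves the destination undefined per the manuals
   (real CPUs appear to leave it unchanged), matching the undefined
   EFLAGS mask declared above. */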
7092
7093
7094/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7095FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7096
7097
7098/** Opcode 0x0f 0xbd. */
7099FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7100{
7101 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7102 IEMOP_HLP_MIN_386();
7103 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7104 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7105}
7106
7107
7108/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7109FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7110
7111
7112/** Opcode 0x0f 0xbe. */
7113FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7114{
7115 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7116 IEMOP_HLP_MIN_386();
7117
7118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7119
7120 /*
7121 * If rm is denoting a register, no more instruction bytes.
7122 */
7123 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7124 {
7125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7126 switch (pVCpu->iem.s.enmEffOpSize)
7127 {
7128 case IEMMODE_16BIT:
7129 IEM_MC_BEGIN(0, 1);
7130 IEM_MC_LOCAL(uint16_t, u16Value);
7131 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7132 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7133 IEM_MC_ADVANCE_RIP();
7134 IEM_MC_END();
7135 return VINF_SUCCESS;
7136
7137 case IEMMODE_32BIT:
7138 IEM_MC_BEGIN(0, 1);
7139 IEM_MC_LOCAL(uint32_t, u32Value);
7140 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7141 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7142 IEM_MC_ADVANCE_RIP();
7143 IEM_MC_END();
7144 return VINF_SUCCESS;
7145
7146 case IEMMODE_64BIT:
7147 IEM_MC_BEGIN(0, 1);
7148 IEM_MC_LOCAL(uint64_t, u64Value);
7149 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7150 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7151 IEM_MC_ADVANCE_RIP();
7152 IEM_MC_END();
7153 return VINF_SUCCESS;
7154
7155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7156 }
7157 }
7158 else
7159 {
7160 /*
7161 * We're loading a register from memory.
7162 */
7163 switch (pVCpu->iem.s.enmEffOpSize)
7164 {
7165 case IEMMODE_16BIT:
7166 IEM_MC_BEGIN(0, 2);
7167 IEM_MC_LOCAL(uint16_t, u16Value);
7168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7171 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7172 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7173 IEM_MC_ADVANCE_RIP();
7174 IEM_MC_END();
7175 return VINF_SUCCESS;
7176
7177 case IEMMODE_32BIT:
7178 IEM_MC_BEGIN(0, 2);
7179 IEM_MC_LOCAL(uint32_t, u32Value);
7180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7183 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7184 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7185 IEM_MC_ADVANCE_RIP();
7186 IEM_MC_END();
7187 return VINF_SUCCESS;
7188
7189 case IEMMODE_64BIT:
7190 IEM_MC_BEGIN(0, 2);
7191 IEM_MC_LOCAL(uint64_t, u64Value);
7192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7195 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7196 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7197 IEM_MC_ADVANCE_RIP();
7198 IEM_MC_END();
7199 return VINF_SUCCESS;
7200
7201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7202 }
7203 }
7204}
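/* Sign extension example: with BL=0x80 (-128), "movsx eax, bl" yields
   EAX=ffffff80h, whereas the movzx counterpart (opcode 0x0f 0xb6) would
   yield EAX=00000080h. */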
7205
7206
7207/** Opcode 0x0f 0xbf. */
7208FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7209{
7210 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7211 IEMOP_HLP_MIN_386();
7212
7213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7214
7215 /** @todo Not entirely sure how the operand size prefix is handled here,
7216 * assuming that it will be ignored. Would be nice to have a few
7217 * tests for this. */
7218 /*
7219 * If rm is denoting a register, no more instruction bytes.
7220 */
7221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7222 {
7223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7224 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7225 {
7226 IEM_MC_BEGIN(0, 1);
7227 IEM_MC_LOCAL(uint32_t, u32Value);
7228 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7229 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7230 IEM_MC_ADVANCE_RIP();
7231 IEM_MC_END();
7232 }
7233 else
7234 {
7235 IEM_MC_BEGIN(0, 1);
7236 IEM_MC_LOCAL(uint64_t, u64Value);
7237 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7238 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7239 IEM_MC_ADVANCE_RIP();
7240 IEM_MC_END();
7241 }
7242 }
7243 else
7244 {
7245 /*
7246 * We're loading a register from memory.
7247 */
7248 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7249 {
7250 IEM_MC_BEGIN(0, 2);
7251 IEM_MC_LOCAL(uint32_t, u32Value);
7252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7255 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7256 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7257 IEM_MC_ADVANCE_RIP();
7258 IEM_MC_END();
7259 }
7260 else
7261 {
7262 IEM_MC_BEGIN(0, 2);
7263 IEM_MC_LOCAL(uint64_t, u64Value);
7264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7267 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7268 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7269 IEM_MC_ADVANCE_RIP();
7270 IEM_MC_END();
7271 }
7272 }
7273 return VINF_SUCCESS;
7274}
7275
7276
7277/** Opcode 0x0f 0xc0. */
7278FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7279{
7280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7281 IEMOP_HLP_MIN_486();
7282 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7283
7284 /*
7285 * If rm is denoting a register, no more instruction bytes.
7286 */
7287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7288 {
7289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7290
7291 IEM_MC_BEGIN(3, 0);
7292 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7293 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7295
7296 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7297 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7298 IEM_MC_REF_EFLAGS(pEFlags);
7299 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7300
7301 IEM_MC_ADVANCE_RIP();
7302 IEM_MC_END();
7303 }
7304 else
7305 {
7306 /*
7307 * We're accessing memory.
7308 */
7309 IEM_MC_BEGIN(3, 3);
7310 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7311 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7312 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7313 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7315
7316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7317 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7318 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7319 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7320 IEM_MC_FETCH_EFLAGS(EFlags);
7321 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7322 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7323 else
7324 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7325
7326 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7327 IEM_MC_COMMIT_EFLAGS(EFlags);
7328 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7329 IEM_MC_ADVANCE_RIP();
7330 IEM_MC_END();
7332 }
7333 return VINF_SUCCESS;
7334}
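/* The exchange-and-add done by the assembly helpers invoked above boils down
   to the following (hypothetical C sketch; the real iemAImpl_xadd_u8 also
   updates the arithmetic EFLAGS): */
#if 0
static void xaddU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;   /* remember the original destination */
    *pu8Dst = u8OldDst + *pu8Reg;       /* the destination receives the sum */
    *pu8Reg = u8OldDst;                 /* the source receives the old destination, cf. u8RegCopy above */
}
#endif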
7335
7336
7337/** Opcode 0x0f 0xc1. */
7338FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7339{
7340 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7341 IEMOP_HLP_MIN_486();
7342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7343
7344 /*
7345 * If rm is denoting a register, no more instruction bytes.
7346 */
7347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7348 {
7349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7350
7351 switch (pVCpu->iem.s.enmEffOpSize)
7352 {
7353 case IEMMODE_16BIT:
7354 IEM_MC_BEGIN(3, 0);
7355 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7356 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7357 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7358
7359 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7360 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7361 IEM_MC_REF_EFLAGS(pEFlags);
7362 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7363
7364 IEM_MC_ADVANCE_RIP();
7365 IEM_MC_END();
7366 return VINF_SUCCESS;
7367
7368 case IEMMODE_32BIT:
7369 IEM_MC_BEGIN(3, 0);
7370 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7371 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7372 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7373
7374 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7375 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7376 IEM_MC_REF_EFLAGS(pEFlags);
7377 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7378
7379 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7380 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7381 IEM_MC_ADVANCE_RIP();
7382 IEM_MC_END();
7383 return VINF_SUCCESS;
7384
7385 case IEMMODE_64BIT:
7386 IEM_MC_BEGIN(3, 0);
7387 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7388 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7389 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7390
7391 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7392 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7393 IEM_MC_REF_EFLAGS(pEFlags);
7394 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7395
7396 IEM_MC_ADVANCE_RIP();
7397 IEM_MC_END();
7398 return VINF_SUCCESS;
7399
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402 }
7403 else
7404 {
7405 /*
7406 * We're accessing memory.
7407 */
7408 switch (pVCpu->iem.s.enmEffOpSize)
7409 {
7410 case IEMMODE_16BIT:
7411 IEM_MC_BEGIN(3, 3);
7412 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7413 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7414 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7415 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7417
7418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7419 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7420 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7421 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7422 IEM_MC_FETCH_EFLAGS(EFlags);
7423 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7424 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7425 else
7426 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7427
7428 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7429 IEM_MC_COMMIT_EFLAGS(EFlags);
7430 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7431 IEM_MC_ADVANCE_RIP();
7432 IEM_MC_END();
7433 return VINF_SUCCESS;
7434
7435 case IEMMODE_32BIT:
7436 IEM_MC_BEGIN(3, 3);
7437 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7438 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7439 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7440 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7442
7443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7444 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7445 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7446 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7447 IEM_MC_FETCH_EFLAGS(EFlags);
7448 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7449 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7450 else
7451 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7452
7453 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7454 IEM_MC_COMMIT_EFLAGS(EFlags);
7455 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7456 IEM_MC_ADVANCE_RIP();
7457 IEM_MC_END();
7458 return VINF_SUCCESS;
7459
7460 case IEMMODE_64BIT:
7461 IEM_MC_BEGIN(3, 3);
7462 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7463 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7464 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7465 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7467
7468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7469 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7470 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7471 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7472 IEM_MC_FETCH_EFLAGS(EFlags);
7473 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7474 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7475 else
7476 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7477
7478 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7479 IEM_MC_COMMIT_EFLAGS(EFlags);
7480 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7481 IEM_MC_ADVANCE_RIP();
7482 IEM_MC_END();
7483 return VINF_SUCCESS;
7484
7485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7486 }
7487 }
7488}
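/* Note that the 32-bit register cases clear bits 63:32 of both operands: in
   64-bit mode any write to a 32-bit GPR zero-extends into the full 64-bit
   register, and xadd writes the destination as well as the source register,
   hence the two IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF invocations above. */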
7489
7490
7491/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
7492FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
7493/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
7494FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
7495/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
7496FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
7497/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
7498FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
7499
7500
7501/** Opcode 0x0f 0xc3. */
7502FNIEMOP_DEF(iemOp_movnti_My_Gy)
7503{
7504 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7505
7506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7507
7508 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7509 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7510 {
7511 switch (pVCpu->iem.s.enmEffOpSize)
7512 {
7513 case IEMMODE_32BIT:
7514 IEM_MC_BEGIN(0, 2);
7515 IEM_MC_LOCAL(uint32_t, u32Value);
7516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7517
7518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7520 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7521 return IEMOP_RAISE_INVALID_OPCODE();
7522
7523 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7524 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7525 IEM_MC_ADVANCE_RIP();
7526 IEM_MC_END();
7527 break;
7528
7529 case IEMMODE_64BIT:
7530 IEM_MC_BEGIN(0, 2);
7531 IEM_MC_LOCAL(uint64_t, u64Value);
7532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7533
7534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7536 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7537 return IEMOP_RAISE_INVALID_OPCODE();
7538
7539 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7540 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7541 IEM_MC_ADVANCE_RIP();
7542 IEM_MC_END();
7543 break;
7544
7545 case IEMMODE_16BIT:
7546 /** @todo check this form. */
7547 return IEMOP_RAISE_INVALID_OPCODE();
7548 }
7549 }
7550 else
7551 return IEMOP_RAISE_INVALID_OPCODE();
7552 return VINF_SUCCESS;
7553}
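/* movnti is the integer non-temporal store. The non-temporal hint only
   affects caching, so it is emulated as a plain store; the SSE2 CPUID check
   above is there because movnti was introduced together with SSE2. */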
7554/* Opcode 0x66 0x0f 0xc3 - invalid */
7555/* Opcode 0xf3 0x0f 0xc3 - invalid */
7556/* Opcode 0xf2 0x0f 0xc3 - invalid */
7557
7558/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
7559FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7560/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
7561FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
7562/* Opcode 0xf3 0x0f 0xc4 - invalid */
7563/* Opcode 0xf2 0x0f 0xc4 - invalid */
7564
7565/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7566FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7567/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
7568FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
7569/* Opcode 0xf3 0x0f 0xc5 - invalid */
7570/* Opcode 0xf2 0x0f 0xc5 - invalid */
7571
7572/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
7573FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
7574/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
7575FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
7576/* Opcode 0xf3 0x0f 0xc6 - invalid */
7577/* Opcode 0xf2 0x0f 0xc6 - invalid */
7578
7579
7580/** Opcode 0x0f 0xc7 !11/1. */
7581FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7582{
7583 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7584
7585 IEM_MC_BEGIN(4, 3);
7586 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7587 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7588 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7589 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7590 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7591 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7593
7594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7595 IEMOP_HLP_DONE_DECODING();
7596 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7597
7598 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7599 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7600 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7601
7602 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7603 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7604 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7605
7606 IEM_MC_FETCH_EFLAGS(EFlags);
7607 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7608 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7609 else
7610 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7611
7612 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7613 IEM_MC_COMMIT_EFLAGS(EFlags);
7614 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7615 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7616 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7617 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7618 IEM_MC_ENDIF();
7619 IEM_MC_ADVANCE_RIP();
7620
7621 IEM_MC_END();
7622 return VINF_SUCCESS;
7623}
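/* The operation performed by the helpers called above, as a hypothetical C
   sketch (the real iemAImpl_cmpxchg8b is in assembly and performs the
   memory access atomically in the locked variant): */
#if 0
static void cmpxchg8bSketch(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx, uint32_t *pEFlags)
{
    if (*pu64Dst == pu64EaxEdx->u)
    {
        *pu64Dst = pu64EbxEcx->u;   /* equal: store ECX:EBX and set ZF */
        *pEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64Dst;   /* not equal: clear ZF and return the old value; */
        *pEFlags &= ~X86_EFL_ZF;    /* the IEM_MC_IF_EFL_BIT_NOT_SET above copies it to EDX:EAX. */
    }
}
#endif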
7624
7625
7626/** Opcode REX.W 0x0f 0xc7 !11/1. */
7627FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7628{
7629 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7630 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7631 {
7632#if 0
7633 RT_NOREF(bRm);
7634 IEMOP_BITCH_ABOUT_STUB();
7635 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7636#else
7637 IEM_MC_BEGIN(4, 3);
7638 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7639 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7640 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7641 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7642 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7643 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7645
7646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7647 IEMOP_HLP_DONE_DECODING();
7648 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7649 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7650
7651 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7652 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7653 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7654
7655 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7656 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7657 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7658
7659 IEM_MC_FETCH_EFLAGS(EFlags);
7660# ifdef RT_ARCH_AMD64
7661 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7662 {
7663 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7664 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7665 else
7666 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7667 }
7668 else
7669# endif
7670 {
7671 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7672 accesses that are not at all atomic, which works fine in a uni-CPU guest
7673 configuration (ignoring DMA). If guest SMP is active we have no choice
7674 but to use a rendezvous callback here. Sigh. */
7675 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7676 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7677 else
7678 {
7679 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7680 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7681 }
7682 }
7683
7684 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7685 IEM_MC_COMMIT_EFLAGS(EFlags);
7686 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7687 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7688 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7689 IEM_MC_ENDIF();
7690 IEM_MC_ADVANCE_RIP();
7691
7692 IEM_MC_END();
7693 return VINF_SUCCESS;
7694#endif
7695 }
7696 Log(("cmpxchg16b -> #UD\n"));
7697 return IEMOP_RAISE_INVALID_OPCODE();
7698}
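/* Unlike cmpxchg8b, cmpxchg16b insists on a 16-byte aligned operand and
   raises #GP(0) otherwise (the IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED
   above), e.g. "lock cmpxchg16b [rsp+8]" faults whenever RSP itself is
   16-byte aligned. */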
7699
7700FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
7701{
7702 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7703 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7704 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7705}
7706
7707/** Opcode 0x0f 0xc7 11/6. */
7708FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7709
7710/** Opcode 0x0f 0xc7 !11/6. */
7711FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7712
7713/** Opcode 0x66 0x0f 0xc7 !11/6. */
7714FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7715
7716/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7717FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7718
7719/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7720FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7721
7722/** Opcode 0x0f 0xc7 11/7. */
7723FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
7724
7725
7726/**
7727 * Group 9 jump table for register variant.
7728 */
7729IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
7730{ /* pfx: none, 066h, 0f3h, 0f2h */
7731 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7732 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
7733 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7734 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7735 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7736 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7737 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7738 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7739};
7740AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
7741
7742
7743/**
7744 * Group 9 jump table for memory variant.
7745 */
7746IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
7747{ /* pfx: none, 066h, 0f3h, 0f2h */
7748 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
7749 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
7750 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
7751 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
7752 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
7753 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
7754 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
7755 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
7756};
7757AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
7758
7759
7760/** Opcode 0x0f 0xc7. */
7761FNIEMOP_DEF(iemOp_Grp9)
7762{
7763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7764 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7765 /* register, register */
7766 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7767 + pVCpu->iem.s.idxPrefix], bRm);
7768 /* memory, register */
7769 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
7770 + pVCpu->iem.s.idxPrefix], bRm);
7771}
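/* Table indexing example: the tables are laid out as reg * 4 + prefix, with
   idxPrefix being 0 for no prefix, 1 for 066h, 2 for 0f3h and 3 for 0f2h.
   Thus "f3 0f c7 /6" with a memory operand selects
   g_apfnGroup9MemReg[6 * 4 + 2], i.e. iemOp_Grp9_vmxon_Mq. */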
7772
7773
7774/**
7775 * Common 'bswap register' helper.
7776 */
7777FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7778{
7779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7780 switch (pVCpu->iem.s.enmEffOpSize)
7781 {
7782 case IEMMODE_16BIT:
7783 IEM_MC_BEGIN(1, 0);
7784 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7785 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7786 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7787 IEM_MC_ADVANCE_RIP();
7788 IEM_MC_END();
7789 return VINF_SUCCESS;
7790
7791 case IEMMODE_32BIT:
7792 IEM_MC_BEGIN(1, 0);
7793 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7794 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7795 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7796 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7797 IEM_MC_ADVANCE_RIP();
7798 IEM_MC_END();
7799 return VINF_SUCCESS;
7800
7801 case IEMMODE_64BIT:
7802 IEM_MC_BEGIN(1, 0);
7803 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7804 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7805 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7806 IEM_MC_ADVANCE_RIP();
7807 IEM_MC_END();
7808 return VINF_SUCCESS;
7809
7810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7811 }
7812}
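/* E.g. with EAX=12345678h, "bswap eax" yields EAX=78563412h. The 16-bit
   form is documented as undefined, which is why the IEMMODE_16BIT case
   still references the register as a 32-bit value and leaves the details
   to the iemAImpl_bswap_u16 helper. */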
7813
7814
7815/** Opcode 0x0f 0xc8. */
7816FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7817{
7818 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7819 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7820 prefix, but REX.B appears to be the correct one. For a parallel
7821 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7822 IEMOP_HLP_MIN_486();
7823 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7824}
7825
7826
7827/** Opcode 0x0f 0xc9. */
7828FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7829{
7830 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7831 IEMOP_HLP_MIN_486();
7832 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7833}
7834
7835
7836/** Opcode 0x0f 0xca. */
7837FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7838{
7839 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7840 IEMOP_HLP_MIN_486();
7841 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7842}
7843
7844
7845/** Opcode 0x0f 0xcb. */
7846FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7847{
7848 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7849 IEMOP_HLP_MIN_486();
7850 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7851}
7852
7853
7854/** Opcode 0x0f 0xcc. */
7855FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7856{
7857 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7858 IEMOP_HLP_MIN_486();
7859 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7860}
7861
7862
7863/** Opcode 0x0f 0xcd. */
7864FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7865{
7866 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7867 IEMOP_HLP_MIN_486();
7868 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7869}
7870
7871
7872/** Opcode 0x0f 0xce. */
7873FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7874{
7875 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7876 IEMOP_HLP_MIN_486();
7877 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7878}
7879
7880
7881/** Opcode 0x0f 0xcf. */
7882FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7883{
7884 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7885 IEMOP_HLP_MIN_486();
7886 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7887}
7888
7889
7890/* Opcode 0x0f 0xd0 - invalid */
7891/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
7892FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
7893/* Opcode 0xf3 0x0f 0xd0 - invalid */
7894/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
7895FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
7896
7897/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7898FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7899/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
7900FNIEMOP_STUB(iemOp_psrlw_Vx_W);
7901/* Opcode 0xf3 0x0f 0xd1 - invalid */
7902/* Opcode 0xf2 0x0f 0xd1 - invalid */
7903
7904/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7905FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7906/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
7907FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
7908/* Opcode 0xf3 0x0f 0xd2 - invalid */
7909/* Opcode 0xf2 0x0f 0xd2 - invalid */
7910
7911/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7912FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7913/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
7914FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
7915/* Opcode 0xf3 0x0f 0xd3 - invalid */
7916/* Opcode 0xf2 0x0f 0xd3 - invalid */
7917
7918/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7919FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7920/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
7921FNIEMOP_STUB(iemOp_paddq_Vx_W);
7922/* Opcode 0xf3 0x0f 0xd4 - invalid */
7923/* Opcode 0xf2 0x0f 0xd4 - invalid */
7924
7925/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7926FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7927/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
7928FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
7929/* Opcode 0xf3 0x0f 0xd5 - invalid */
7930/* Opcode 0xf2 0x0f 0xd5 - invalid */
7931
7932/* Opcode 0x0f 0xd6 - invalid */
7933
7934/**
7935 * @opcode 0xd6
7936 * @oppfx 0x66
7937 * @opcpuid sse2
7938 * @opgroup og_sse2_pcksclr_datamove
7939 * @opxcpttype none
7940 * @optest op1=-1 op2=2 -> op1=2
7941 * @optest op1=0 op2=-42 -> op1=-42
7942 */
7943FNIEMOP_DEF(iemOp_movq_Wq_Vq)
7944{
7945 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
7946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7947 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7948 {
7949 /*
7950 * Register, register.
7951 */
7952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7953 IEM_MC_BEGIN(0, 2);
7954 IEM_MC_LOCAL(uint64_t, uSrc);
7955
7956 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7957 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7958
7959 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7960 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
7961
7962 IEM_MC_ADVANCE_RIP();
7963 IEM_MC_END();
7964 }
7965 else
7966 {
7967 /*
7968 * Memory, register.
7969 */
7970 IEM_MC_BEGIN(0, 2);
7971 IEM_MC_LOCAL(uint64_t, uSrc);
7972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7973
7974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7977 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7978
7979 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7980 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7981
7982 IEM_MC_ADVANCE_RIP();
7983 IEM_MC_END();
7984 }
7985 return VINF_SUCCESS;
7986}
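/* So "movq xmm1, xmm2" copies the low quadword and zeroes bits 127:64 of the
   destination (the WqZxReg operand above), while the memory form is a plain
   64-bit store: "movq [rax], xmm2" writes exactly eight bytes. */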
7987
7988
7989/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7990FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7991/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7992FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7993#if 0
7994FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7995{
7996 /* Docs say register only. */
7997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7998
7999 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8000 {
8001 case IEM_OP_PRF_SIZE_OP: /* SSE */
8002 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
8003 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8004 IEM_MC_BEGIN(2, 0);
8005 IEM_MC_ARG(uint64_t *, pDst, 0);
8006 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8007 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8008 IEM_MC_PREPARE_SSE_USAGE();
8009 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8010 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8011 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8012 IEM_MC_ADVANCE_RIP();
8013 IEM_MC_END();
8014 return VINF_SUCCESS;
8015
8016 case 0: /* MMX */
8017 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
8018 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8019 IEM_MC_BEGIN(2, 0);
8020 IEM_MC_ARG(uint64_t *, pDst, 0);
8021 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8022 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8023 IEM_MC_PREPARE_FPU_USAGE();
8024 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8025 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8026 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8027 IEM_MC_ADVANCE_RIP();
8028 IEM_MC_END();
8029 return VINF_SUCCESS;
8030
8031 default:
8032 return IEMOP_RAISE_INVALID_OPCODE();
8033 }
8034}
8035#endif
8036
8037
8038/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8039FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8040{
8041 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8042 /** @todo testcase: Check that the instruction implicitly clears the high
8043 * bits in 64-bit mode. The REX.W prefix only becomes necessary when VLMAX > 256
8044 * and opcode modifications are made to work with the whole width (not
8045 * just 128). */
8046 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8047 /* Docs say register only. */
8048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8049 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8050 {
8051 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8052 IEM_MC_BEGIN(2, 0);
8053 IEM_MC_ARG(uint64_t *, pDst, 0);
8054 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8055 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8056 IEM_MC_PREPARE_FPU_USAGE();
8057 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8058 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8059 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8060 IEM_MC_ADVANCE_RIP();
8061 IEM_MC_END();
8062 return VINF_SUCCESS;
8063 }
8064 return IEMOP_RAISE_INVALID_OPCODE();
8065}
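/* pmovmskb gathers the most significant bit of each source byte into the low
   bits of the destination GPR. E.g. with MM1=ff00ff00ff00ff00h,
   "pmovmskb eax, mm1" yields EAX=000000aah; the SSE form below produces a
   16-bit mask the same way, one bit per byte of the XMM register. */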
8066
8067 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8068FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8069{
8070 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8071 /** @todo testcase: Check that the instruction implicitly clears the high
8072 * bits in 64-bit mode. The REX.W prefix only becomes necessary when VLMAX > 256
8073 * and opcode modifications are made to work with the whole width (not
8074 * just 128). */
8075 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8076 /* Docs say register only. */
8077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8079 {
8080 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8081 IEM_MC_BEGIN(2, 0);
8082 IEM_MC_ARG(uint64_t *, pDst, 0);
8083 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8084 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8085 IEM_MC_PREPARE_SSE_USAGE();
8086 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8087 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8088 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8089 IEM_MC_ADVANCE_RIP();
8090 IEM_MC_END();
8091 return VINF_SUCCESS;
8092 }
8093 return IEMOP_RAISE_INVALID_OPCODE();
8094}
8095
8096/* Opcode 0xf3 0x0f 0xd7 - invalid */
8097/* Opcode 0xf2 0x0f 0xd7 - invalid */
8098
8099
8100/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8101FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8102/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8103FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8104/* Opcode 0xf3 0x0f 0xd8 - invalid */
8105/* Opcode 0xf2 0x0f 0xd8 - invalid */
8106
8107/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8108FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8109/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8110FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8111/* Opcode 0xf3 0x0f 0xd9 - invalid */
8112/* Opcode 0xf2 0x0f 0xd9 - invalid */
8113
8114/** Opcode 0x0f 0xda - pminub Pq, Qq */
8115FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8116/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8117FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8118/* Opcode 0xf3 0x0f 0xda - invalid */
8119/* Opcode 0xf2 0x0f 0xda - invalid */
8120
8121/** Opcode 0x0f 0xdb - pand Pq, Qq */
8122FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8123/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8124FNIEMOP_STUB(iemOp_pand_Vx_W);
8125/* Opcode 0xf3 0x0f 0xdb - invalid */
8126/* Opcode 0xf2 0x0f 0xdb - invalid */
8127
8128/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8129FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8130/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8131FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8132/* Opcode 0xf3 0x0f 0xdc - invalid */
8133/* Opcode 0xf2 0x0f 0xdc - invalid */
8134
8135/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8136FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8137/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8138FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8139/* Opcode 0xf3 0x0f 0xdd - invalid */
8140/* Opcode 0xf2 0x0f 0xdd - invalid */
8141
8142/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8143FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8144/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8145FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8146/* Opcode 0xf3 0x0f 0xde - invalid */
8147/* Opcode 0xf2 0x0f 0xde - invalid */
8148
8149/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8150FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8151/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8152FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8153/* Opcode 0xf3 0x0f 0xdf - invalid */
8154/* Opcode 0xf2 0x0f 0xdf - invalid */
8155
8156/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8157FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8158/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8159FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8160/* Opcode 0xf3 0x0f 0xe0 - invalid */
8161/* Opcode 0xf2 0x0f 0xe0 - invalid */
8162
8163/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8164FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8165/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8166FNIEMOP_STUB(iemOp_psraw_Vx_W);
8167/* Opcode 0xf3 0x0f 0xe1 - invalid */
8168/* Opcode 0xf2 0x0f 0xe1 - invalid */
8169
8170/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8171FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8172/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8173FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8174/* Opcode 0xf3 0x0f 0xe2 - invalid */
8175/* Opcode 0xf2 0x0f 0xe2 - invalid */
8176
8177/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8178FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8179/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8180FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8181/* Opcode 0xf3 0x0f 0xe3 - invalid */
8182/* Opcode 0xf2 0x0f 0xe3 - invalid */
8183
8184/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8185FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8186/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8187FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8188/* Opcode 0xf3 0x0f 0xe4 - invalid */
8189/* Opcode 0xf2 0x0f 0xe4 - invalid */
8190
8191/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8192FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8193/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8194FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8195/* Opcode 0xf3 0x0f 0xe5 - invalid */
8196/* Opcode 0xf2 0x0f 0xe5 - invalid */
8197
8198/* Opcode 0x0f 0xe6 - invalid */
8199/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8200FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8201/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8202FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8203/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8204FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8205
8206
8207/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8208FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8209{
8210 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8212 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8213 {
8214 /* Register, memory. */
8215 IEM_MC_BEGIN(0, 2);
8216 IEM_MC_LOCAL(uint64_t, uSrc);
8217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8218
8219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8221 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8222 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8223
8224 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8225 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8226
8227 IEM_MC_ADVANCE_RIP();
8228 IEM_MC_END();
8229 return VINF_SUCCESS;
8230 }
8231 /* The register, register encoding is invalid. */
8232 return IEMOP_RAISE_INVALID_OPCODE();
8233}
8234
8235/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8236FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8237{
8238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8239 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8240 {
8241 /* Register, memory. */
8242 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8243 IEM_MC_BEGIN(0, 2);
8244 IEM_MC_LOCAL(RTUINT128U, uSrc);
8245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8246
8247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8249 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8250 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8251
8252 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8253 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8254
8255 IEM_MC_ADVANCE_RIP();
8256 IEM_MC_END();
8257 return VINF_SUCCESS;
8258 }
8259
8260 /* The register, register encoding is invalid. */
8261 return IEMOP_RAISE_INVALID_OPCODE();
8262}
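/* Like movnti earlier, these non-temporal stores are emulated as ordinary
   stores; the one architectural wrinkle is that movntdq requires a 16-byte
   aligned operand, hence the IEM_MC_STORE_MEM_U128_ALIGN_SSE above. */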
8263
8264/* Opcode 0xf3 0x0f 0xe7 - invalid */
8265/* Opcode 0xf2 0x0f 0xe7 - invalid */
8266
8267
8268/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8269FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8270/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8271FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8272/* Opcode 0xf3 0x0f 0xe8 - invalid */
8273/* Opcode 0xf2 0x0f 0xe8 - invalid */
8274
8275/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8276FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8277/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8278FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8279/* Opcode 0xf3 0x0f 0xe9 - invalid */
8280/* Opcode 0xf2 0x0f 0xe9 - invalid */
8281
8282/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8283FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8284/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8285FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8286/* Opcode 0xf3 0x0f 0xea - invalid */
8287/* Opcode 0xf2 0x0f 0xea - invalid */
8288
8289/** Opcode 0x0f 0xeb - por Pq, Qq */
8290FNIEMOP_STUB(iemOp_por_Pq_Qq);
8291/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8292FNIEMOP_STUB(iemOp_por_Vx_W);
8293/* Opcode 0xf3 0x0f 0xeb - invalid */
8294/* Opcode 0xf2 0x0f 0xeb - invalid */
8295
8296/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8297FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8298/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8299FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8300/* Opcode 0xf3 0x0f 0xec - invalid */
8301/* Opcode 0xf2 0x0f 0xec - invalid */
8302
8303/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8304FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8305/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8306FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8307/* Opcode 0xf3 0x0f 0xed - invalid */
8308/* Opcode 0xf2 0x0f 0xed - invalid */
8309
8310/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8311FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8312/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8313FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8314/* Opcode 0xf3 0x0f 0xee - invalid */
8315/* Opcode 0xf2 0x0f 0xee - invalid */
8316
8317
8318/** Opcode 0x0f 0xef - pxor Pq, Qq */
8319FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8320{
8321 IEMOP_MNEMONIC(pxor, "pxor");
8322 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8323}
8324
8325/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8326FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8327{
8328 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8329 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8330}
8331
8332/* Opcode 0xf3 0x0f 0xef - invalid */
8333/* Opcode 0xf2 0x0f 0xef - invalid */
8334
8335/* Opcode 0x0f 0xf0 - invalid */
8336/* Opcode 0x66 0x0f 0xf0 - invalid */
8337/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8338FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8339
8340/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8341FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8342/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8343FNIEMOP_STUB(iemOp_psllw_Vx_W);
8344/* Opcode 0xf2 0x0f 0xf1 - invalid */
8345
8346/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8347FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8348/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8349FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8350/* Opcode 0xf2 0x0f 0xf2 - invalid */
8351
8352/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8353FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8354/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8355FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8356/* Opcode 0xf2 0x0f 0xf3 - invalid */
8357
8358/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8359FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8360/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8361FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8362/* Opcode 0xf2 0x0f 0xf4 - invalid */
8363
8364/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8365FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8366/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8367FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8368/* Opcode 0xf2 0x0f 0xf5 - invalid */
8369
8370/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8371FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8372/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8373FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8374/* Opcode 0xf2 0x0f 0xf6 - invalid */
8375
8376/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8377FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8378/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8379FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8380/* Opcode 0xf2 0x0f 0xf7 - invalid */
8381
8382/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8383FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8384/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8385FNIEMOP_STUB(iemOp_psubb_Vx_W);
8386/* Opcode 0xf2 0x0f 0xf8 - invalid */
8387
8388/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8389FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8390/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8391FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8392/* Opcode 0xf2 0x0f 0xf9 - invalid */
8393
8394/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8395FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8396/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
8397FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
8398/* Opcode 0xf2 0x0f 0xfa - invalid */
8399
8400/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8401FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8402/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
8403FNIEMOP_STUB(iemOp_psubq_Vx_W);
8404/* Opcode 0xf2 0x0f 0xfb - invalid */
8405
8406/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8407FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8408/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
8409FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
8410/* Opcode 0xf2 0x0f 0xfc - invalid */
8411
8412/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8413FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8414/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
8415FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
8416/* Opcode 0xf2 0x0f 0xfd - invalid */
8417
8418/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8419FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8420/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
8421FNIEMOP_STUB(iemOp_paddd_Vx_W);
8422/* Opcode 0xf2 0x0f 0xfe - invalid */
8423
8424
8425/** Opcode **** 0x0f 0xff - UD0 */
8426FNIEMOP_DEF(iemOp_ud0)
8427{
8428 IEMOP_MNEMONIC(ud0, "ud0");
8429 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8430 {
8431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8432#ifndef TST_IEM_CHECK_MC
8433 RTGCPTR GCPtrEff;
8434 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8435 if (rcStrict != VINF_SUCCESS)
8436 return rcStrict;
8437#endif
8438 IEMOP_HLP_DONE_DECODING();
8439 }
8440 return IEMOP_RAISE_INVALID_OPCODE();
8441}
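/* Like ud1 (0x0f 0xb9) above, Intel decodes a ModR/M byte plus any SIB and
   displacement bytes for ud0 before raising #UD, while other vendors fault
   on the opcode alone, hence the vendor check and the effective address
   calculation via iemOpHlpCalcRmEffAddr above. */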
8442
8443
8444
8445/**
8446 * Two byte opcode map, first byte 0x0f.
8447 *
8448 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
8449 * check if it needs updating as well when making changes.
8450 */
8451IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8452{
8453 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8454 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8455 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8456 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8457 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8458 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8459 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8460 /* 0x06 */ IEMOP_X4(iemOp_clts),
8461 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8462 /* 0x08 */ IEMOP_X4(iemOp_invd),
8463 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8464 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8465 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8466 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8467 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8468 /* 0x0e */ IEMOP_X4(iemOp_femms),
8469 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8470
8471 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
8472 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
8473 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
8474 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8475 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8476 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
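
/*
 * Illustrative note (not part of the original table): each row above holds
 * four entries for the same opcode byte, one per mandatory-prefix column --
 * none, 0x66, 0xF3 and 0xF2 -- which is what IEMOP_X4() expands to for
 * prefix-insensitive instructions and why the AssertCompile just above
 * checks for 256 * 4 = 1024 entries.  A minimal sketch of the dispatch,
 * assuming the decoder state tracks the last mandatory prefix in
 * pVCpu->iem.s.idxPrefix as 0 (none), 1 (0x66), 2 (0xF3) or 3 (0xF2):
 */
#if 0 /* sketch for illustration only, not the shipping decoder */
FNIEMOP_DEF(iemOp_TwoByteEscapeSketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);  /* the opcode byte following the 0x0f escape */
    /* Four table entries per opcode: row = opcode * 4, column = prefix index. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif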

/** @} */
