VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h @ 103194

Last change on this file since 103194 was 103191, checked in by vboxsync, 14 months ago

VMM/IEMAllInst*: Liveness analysis, part 4: Flag input & modification annotations. bugref:10372

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 528.8 KB

/* $Id: IEMAllInstTwoByte0f.cpp.h 103191 2024-02-04 23:41:47Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
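
/*
 * Usage sketch (illustrative only; the handler and helper names below follow
 * the file's conventions but are assumptions, not copied from this revision):
 * a typical opcode handler is a two-liner that just picks the assembly helper
 * and defers to the worker above, e.g. for paddb:
 *
 *      FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
 *      {
 *          IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
 *      }
 *
 * Keeping the ModR/M decoding, exception checks and FPU-to-MMX mode switch in
 * the worker lets the many pxxx handlers stay this small.
 */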


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
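
/*
 * Illustrative caller for the SSE2 worker above (a sketch; the handler and
 * helper names are assumptions modelled on the file's naming scheme):
 *
 *      FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
 *      {
 *          IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
 *      }
 */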


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
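
/*
 * Illustrative caller (sketch; names are assumptions in the file's style):
 * the unpack-low family maps onto this worker, and the Qd operand form
 * matches the 32-bit memory fetch above:
 *
 *      FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 *      {
 *          IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 *      }
 */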


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
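
/*
 * Illustrative caller (sketch; names are assumptions following the file's
 * conventions), e.g. packed single-precision add:
 *
 *      FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 *      {
 *          IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 *      }
 *
 * The IEMSSERESULT local couples the 128-bit result with the updated MXCSR,
 * which is why the worker first stores the result and only then raises any
 * pending SIMD FP exception.
 */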


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
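
/*
 * Illustrative caller (sketch; names assumed from the file's pattern), e.g.
 * the SSE3 horizontal add:
 *
 *      FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
 *      {
 *          IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *          return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 *      }
 */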


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for grp6 verr and verw, i.e. 0x0f 0x00 /4 and /5. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
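
/*
 * Worked decode example for the table dispatch above (illustrative): for the
 * byte sequence 0F 00 D8 the ModR/M byte is 0xd8 = 11 011 000b, so
 * IEM_GET_MODRM_REG_8 yields (0xd8 >> 3) & 7 = 3 and g_apfnGroup6[3] selects
 * iemOp_Grp6_ltr, which then sees mod=3 (register mode) and rm=0 (AX).
 */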
1373
1374
1375/** Opcode 0x0f 0x01 /0. */
1376FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
1377{
1378 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
1379 IEMOP_HLP_MIN_286();
1380 IEMOP_HLP_64BIT_OP_SIZE();
1381 IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
1382 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1385 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1386 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
1387 IEM_MC_END();
1388}
1389
1390
1391/** Opcode 0x0f 0x01 /0. */
1392FNIEMOP_DEF(iemOp_Grp7_vmcall)
1393{
1394 IEMOP_MNEMONIC(vmcall, "vmcall");
1395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */
1396
1397 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1398 want all hypercalls regardless of instruction used, and if a
1399 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1400 (NEM/win makes ASSUMPTIONS about this behavior.) */
1401 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
1402}
1403
1404
1405/** Opcode 0x0f 0x01 /0. */
1406#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1407FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1408{
1409 IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
1410 IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
1411 IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
1412 IEMOP_HLP_DONE_DECODING();
1413 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1414 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
1415 iemCImpl_vmlaunch);
1416}
1417#else
1418FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1419{
1420 IEMOP_BITCH_ABOUT_STUB();
1421 IEMOP_RAISE_INVALID_OPCODE_RET();
1422}
1423#endif
1424
1425
1426/** Opcode 0x0f 0x01 /0. */
1427#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1428FNIEMOP_DEF(iemOp_Grp7_vmresume)
1429{
1430 IEMOP_MNEMONIC(vmresume, "vmresume");
1431 IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
1432 IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
1433 IEMOP_HLP_DONE_DECODING();
1434 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1435 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
1436 iemCImpl_vmresume);
1437}
1438#else
1439FNIEMOP_DEF(iemOp_Grp7_vmresume)
1440{
1441 IEMOP_BITCH_ABOUT_STUB();
1442 IEMOP_RAISE_INVALID_OPCODE_RET();
1443}
1444#endif
1445
1446
1447/** Opcode 0x0f 0x01 /0. */
1448#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1449FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1450{
1451 IEMOP_MNEMONIC(vmxoff, "vmxoff");
1452 IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
1453 IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
1454 IEMOP_HLP_DONE_DECODING();
1455 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
1456}
1457#else
1458FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1459{
1460 IEMOP_BITCH_ABOUT_STUB();
1461 IEMOP_RAISE_INVALID_OPCODE_RET();
1462}
1463#endif
1464
1465
1466/** Opcode 0x0f 0x01 /1. */
1467FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
1468{
1469 IEMOP_MNEMONIC(sidt, "sidt Ms");
1470 IEMOP_HLP_MIN_286();
1471 IEMOP_HLP_64BIT_OP_SIZE();
1472 IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
1473 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1476 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1477 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
1478 IEM_MC_END();
1479}
1480
1481
1482/** Opcode 0x0f 0x01 /1. */
1483FNIEMOP_DEF(iemOp_Grp7_monitor)
1484{
1485 IEMOP_MNEMONIC(monitor, "monitor");
1486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
1487 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
1488}
1489
1490
1491/** Opcode 0x0f 0x01 /1. */
1492FNIEMOP_DEF(iemOp_Grp7_mwait)
1493{
1494 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
1495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1496 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
1497}
1498
1499
1500/** Opcode 0x0f 0x01 /2. */
1501FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
1502{
1503 IEMOP_MNEMONIC(lgdt, "lgdt");
1504 IEMOP_HLP_64BIT_OP_SIZE();
1505 IEM_MC_BEGIN(3, 1, 0, 0);
1506 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1509 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1510 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
1511 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1512 IEM_MC_END();
1513}
1514
1515
1516/** Opcode 0x0f 0x01 0xd0. */
1517FNIEMOP_DEF(iemOp_Grp7_xgetbv)
1518{
1519 IEMOP_MNEMONIC(xgetbv, "xgetbv");
1520 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1521 {
1522 /** @todo r=ramshankar: We should use
1523 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1524 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1525/** @todo testcase: test prefixes and exceptions. currently not checking for the
1526 * OPSIZE one ... */
1527 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1528 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
1529 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1530 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
1531 iemCImpl_xgetbv);
1532 }
1533 IEMOP_RAISE_INVALID_OPCODE_RET();
1534}
1535
1536
1537/** Opcode 0x0f 0x01 0xd1. */
1538FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1539{
1540 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1541 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1542 {
1543 /** @todo r=ramshankar: We should use
1544 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
1545 * IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
1546/** @todo testcase: test prefixes and exceptions. currently not checking for the
1547 * OPSIZE one ... */
1548 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1549 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
1550 }
1551 IEMOP_RAISE_INVALID_OPCODE_RET();
1552}
1553
1554
1555/** Opcode 0x0f 0x01 /3. */
1556FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1557{
1558 IEMOP_MNEMONIC(lidt, "lidt");
1559 IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
1560 IEM_MC_BEGIN(3, 1, 0, 0);
1561 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1564 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1565 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
1566 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1567 IEM_MC_END();
1568}
1569
1570
1571/** Opcode 0x0f 0x01 0xd8. */
1572#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1573FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
1574{
1575 IEMOP_MNEMONIC(vmrun, "vmrun");
1576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1577 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1578 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
1579 iemCImpl_vmrun);
1580}
1581#else
1582FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1583#endif
1584
1585/** Opcode 0x0f 0x01 0xd9. */
1586FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
1587{
1588 IEMOP_MNEMONIC(vmmcall, "vmmcall");
1589 /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
1590 * opcode sequence when F3 or F2 is used as prefix. So, the assumtion
1591 * here cannot be right... */
1592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1593
1594 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1595 want all hypercalls regardless of instruction used, and if a
1596 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1597 (NEM/win makes ASSUMPTIONS about this behavior.) */
1598 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
1599}
1600
1601/** Opcode 0x0f 0x01 0xda. */
1602#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1603FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1604{
1605 IEMOP_MNEMONIC(vmload, "vmload");
1606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1607 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
1608}
1609#else
1610FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1611#endif
1612
1613
1614/** Opcode 0x0f 0x01 0xdb. */
1615#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1616FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1617{
1618 IEMOP_MNEMONIC(vmsave, "vmsave");
1619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1620 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
1621}
1622#else
1623FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1624#endif
1625
1626
1627/** Opcode 0x0f 0x01 0xdc. */
1628#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1629FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1630{
1631 IEMOP_MNEMONIC(stgi, "stgi");
1632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1633 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
1634}
1635#else
1636FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1637#endif
1638
1639
1640/** Opcode 0x0f 0x01 0xdd. */
1641#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1642FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1643{
1644 IEMOP_MNEMONIC(clgi, "clgi");
1645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1646 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
1647}
1648#else
1649FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1650#endif
1651
1652
1653/** Opcode 0x0f 0x01 0xdf. */
1654#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1655FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1656{
1657 IEMOP_MNEMONIC(invlpga, "invlpga");
1658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1659 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
1660}
1661#else
1662FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1663#endif
1664
1665
1666/** Opcode 0x0f 0x01 0xde. */
1667#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1668FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1669{
1670 IEMOP_MNEMONIC(skinit, "skinit");
1671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1672 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
1673}
1674#else
1675FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1676#endif
1677
1678
1679/** Opcode 0x0f 0x01 /4. */
1680FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1681{
1682 IEMOP_MNEMONIC(smsw, "smsw");
1683 IEMOP_HLP_MIN_286();
1684 if (IEM_IS_MODRM_REG_MODE(bRm))
1685 {
1686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1687 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
1688 iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1689 }
1690
 1691 /* Ignore the operand size here; memory refs are always 16-bit. */
1692 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1693 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1696 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1697 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1698 IEM_MC_END();
1699}
1700
1701
1702/** Opcode 0x0f 0x01 /6. */
1703FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1704{
 1705 /* The operand size is effectively ignored; everything is 16-bit and only
 1706 the lower four bits (PE, MP, EM and TS) of the operand are used. */
1707 IEMOP_MNEMONIC(lmsw, "lmsw");
1708 IEMOP_HLP_MIN_286();
1709 if (IEM_IS_MODRM_REG_MODE(bRm))
1710 {
1711 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1714 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1715 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1716 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1717 IEM_MC_END();
1718 }
1719 else
1720 {
1721 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1722 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1723 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1726 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1727 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1728 IEM_MC_END();
1729 }
1730}
1731
1732
1733/** Opcode 0x0f 0x01 /7. */
1734FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1735{
1736 IEMOP_MNEMONIC(invlpg, "invlpg");
1737 IEMOP_HLP_MIN_486();
1738 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
1739 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1742 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1743 IEM_MC_END();
1744}
1745
1746
1747/** Opcode 0x0f 0x01 0xf8. */
1748FNIEMOP_DEF(iemOp_Grp7_swapgs)
1749{
1750 IEMOP_MNEMONIC(swapgs, "swapgs");
1751 IEMOP_HLP_ONLY_64BIT();
1752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1753 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1754}
1755
1756
1757/** Opcode 0x0f 0x01 0xf9. */
1758FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1759{
1760 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1762 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1763 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1764 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1765 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1766 iemCImpl_rdtscp);
1767}
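
/* Note (illustrative): the three GPR bits passed to the deferral above are
   the liveness/clobber annotation for RDTSCP's outputs: EDX:EAX receive the
   TSC value and ECX receives the IA32_TSC_AUX MSR. */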
1768
1769
1770/**
1771 * Group 7 jump table, memory variant.
1772 */
1773IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1774{
1775 iemOp_Grp7_sgdt,
1776 iemOp_Grp7_sidt,
1777 iemOp_Grp7_lgdt,
1778 iemOp_Grp7_lidt,
1779 iemOp_Grp7_smsw,
1780 iemOp_InvalidWithRM,
1781 iemOp_Grp7_lmsw,
1782 iemOp_Grp7_invlpg
1783};
1784
1785
1786/** Opcode 0x0f 0x01. */
1787FNIEMOP_DEF(iemOp_Grp7)
1788{
1789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1790 if (IEM_IS_MODRM_MEM_MODE(bRm))
1791 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1792
1793 switch (IEM_GET_MODRM_REG_8(bRm))
1794 {
1795 case 0:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1799 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1800 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1801 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1802 }
1803 IEMOP_RAISE_INVALID_OPCODE_RET();
1804
1805 case 1:
1806 switch (IEM_GET_MODRM_RM_8(bRm))
1807 {
1808 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1809 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1810 }
1811 IEMOP_RAISE_INVALID_OPCODE_RET();
1812
1813 case 2:
1814 switch (IEM_GET_MODRM_RM_8(bRm))
1815 {
1816 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1817 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1818 }
1819 IEMOP_RAISE_INVALID_OPCODE_RET();
1820
1821 case 3:
1822 switch (IEM_GET_MODRM_RM_8(bRm))
1823 {
1824 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1825 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1826 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1827 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1828 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1829 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1830 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1831 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834
1835 case 4:
1836 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1837
1838 case 5:
1839 IEMOP_RAISE_INVALID_OPCODE_RET();
1840
1841 case 6:
1842 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1843
1844 case 7:
1845 switch (IEM_GET_MODRM_RM_8(bRm))
1846 {
1847 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1848 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1849 }
1850 IEMOP_RAISE_INVALID_OPCODE_RET();
1851
1852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1853 }
1854}
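
/* Decoding sketch for 0x0f 0x01 (illustrative only; mirrors the code above):
 *
 *     uint8_t const iReg = (bRm >> 3) & 7;        // group member, /0../7
 *     bool const    fMem = ((bRm >> 6) & 3) != 3; // mod != 11b => memory form
 *
 * Memory forms dispatch through g_apfnGroup7Mem on iReg alone, while the
 * register forms also encode a function in the rm field, e.g. 0x0f 0x01 0xd8
 * (mod=11b, reg=011b, rm=000b) is VMRUN and 0x0f 0x01 0xf8 is SWAPGS. */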
1855
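/**
 * Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03), selected
 * by @a fIsLar.  Only ZF is modified, hence the IEM_CIMPL_F_STATUS_FLAGS
 * annotation on the CIMPL calls below.
 */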
1856FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1857{
1858 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1860
1861 if (IEM_IS_MODRM_REG_MODE(bRm))
1862 {
1863 switch (pVCpu->iem.s.enmEffOpSize)
1864 {
1865 case IEMMODE_16BIT:
1866 IEM_MC_BEGIN(3, 0, 0, 0);
1867 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1868 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1869 IEM_MC_ARG(uint16_t, u16Sel, 1);
1870 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1871
1872 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1873 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1874 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1875 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1876
1877 IEM_MC_END();
1878 break;
1879
1880 case IEMMODE_32BIT:
1881 case IEMMODE_64BIT:
1882 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
1883 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1884 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1885 IEM_MC_ARG(uint16_t, u16Sel, 1);
1886 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1887
1888 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1889 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1890 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1891 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1892
1893 IEM_MC_END();
1894 break;
1895
1896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1897 }
1898 }
1899 else
1900 {
1901 switch (pVCpu->iem.s.enmEffOpSize)
1902 {
1903 case IEMMODE_16BIT:
1904 IEM_MC_BEGIN(3, 1, 0, 0);
1905 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1906 IEM_MC_ARG(uint16_t, u16Sel, 1);
1907 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1909
1910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1911 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1912
1913 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1914 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1915 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1916 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1917
1918 IEM_MC_END();
1919 break;
1920
1921 case IEMMODE_32BIT:
1922 case IEMMODE_64BIT:
1923 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
1924 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1925 IEM_MC_ARG(uint16_t, u16Sel, 1);
1926 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1928
1929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1930 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1931/** @todo testcase: make sure it's a 16-bit read. */
1932
1933 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1934 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1935 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1936 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1937
1938 IEM_MC_END();
1939 break;
1940
1941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1942 }
1943 }
1944}
1945
1946
1947
1948/**
1949 * @opcode 0x02
1950 * @opflmodify zf
1951 */
1952FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1953{
1954 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1955 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1956}
1957
1958
1959/**
1960 * @opcode 0x03
1961 * @opflmodify zf
1962 */
1963FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1964{
1965 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1966 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1967}
1968
1969
1970/** Opcode 0x0f 0x05. */
1971FNIEMOP_DEF(iemOp_syscall)
1972{
1973 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1975 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1976 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1977 iemCImpl_syscall);
1978}
1979
1980
1981/** Opcode 0x0f 0x06. */
1982FNIEMOP_DEF(iemOp_clts)
1983{
1984 IEMOP_MNEMONIC(clts, "clts");
1985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1986 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clts);
1987}
1988
1989
1990/** Opcode 0x0f 0x07. */
1991FNIEMOP_DEF(iemOp_sysret)
1992{
1993 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1995 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1996 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1997 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1998}
1999
2000
2001/** Opcode 0x0f 0x08. */
2002FNIEMOP_DEF(iemOp_invd)
2003{
2004 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
2005 IEMOP_HLP_MIN_486();
2006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2007 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
2008}
2009
2010
2011/** Opcode 0x0f 0x09. */
2012FNIEMOP_DEF(iemOp_wbinvd)
2013{
2014 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2015 IEMOP_HLP_MIN_486();
2016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2017 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
2018}
2019
2020
2021/** Opcode 0x0f 0x0b. */
2022FNIEMOP_DEF(iemOp_ud2)
2023{
2024 IEMOP_MNEMONIC(ud2, "ud2");
2025 IEMOP_RAISE_INVALID_OPCODE_RET();
2026}
2027
2028/** Opcode 0x0f 0x0d. */
2029FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2030{
2031 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2032 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2033 {
2034 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2035 IEMOP_RAISE_INVALID_OPCODE_RET();
2036 }
2037
2038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2039 if (IEM_IS_MODRM_REG_MODE(bRm))
2040 {
2041 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2042 IEMOP_RAISE_INVALID_OPCODE_RET();
2043 }
2044
2045 switch (IEM_GET_MODRM_REG_8(bRm))
2046 {
2047 case 2: /* Aliased to /0 for the time being. */
2048 case 4: /* Aliased to /0 for the time being. */
2049 case 5: /* Aliased to /0 for the time being. */
2050 case 6: /* Aliased to /0 for the time being. */
2051 case 7: /* Aliased to /0 for the time being. */
2052 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2053 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2054 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2055 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2056 }
2057
2058 IEM_MC_BEGIN(0, 1, 0, 0);
2059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2062 /* Currently a NOP. */
2063 IEM_MC_NOREF(GCPtrEffSrc);
2064 IEM_MC_ADVANCE_RIP_AND_FINISH();
2065 IEM_MC_END();
2066}
2067
2068
2069/** Opcode 0x0f 0x0e. */
2070FNIEMOP_DEF(iemOp_femms)
2071{
2072 IEMOP_MNEMONIC(femms, "femms");
2073
2074 IEM_MC_BEGIN(0, 0, 0, 0);
2075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2076 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2077 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2078 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2079 IEM_MC_FPU_FROM_MMX_MODE();
2080 IEM_MC_ADVANCE_RIP_AND_FINISH();
2081 IEM_MC_END();
2082}
2083
2084
2085/** Opcode 0x0f 0x0f. */
2086FNIEMOP_DEF(iemOp_3Dnow)
2087{
2088 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2089 {
2090 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2091 IEMOP_RAISE_INVALID_OPCODE_RET();
2092 }
2093
2094#ifdef IEM_WITH_3DNOW
2095 /* This is pretty sparse, use switch instead of table. */
2096 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2097 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2098#else
2099 IEMOP_BITCH_ABOUT_STUB();
2100 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2101#endif
2102}
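
/* Note (illustrative): 3DNow! encodes the actual operation in a trailing
   suffix byte (0x0f 0x0f /r ib), and the defined suffix values are sparse -
   e.g. 0x9e is PFADD - hence the dispatcher switch rather than a 256-entry
   table. */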
2103
2104
2105/**
2106 * @opcode 0x10
2107 * @oppfx none
2108 * @opcpuid sse
2109 * @opgroup og_sse_simdfp_datamove
2110 * @opxcpttype 4UA
2111 * @optest op1=1 op2=2 -> op1=2
2112 * @optest op1=0 op2=-22 -> op1=-22
2113 */
2114FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2115{
2116 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2118 if (IEM_IS_MODRM_REG_MODE(bRm))
2119 {
2120 /*
2121 * XMM128, XMM128.
2122 */
2123 IEM_MC_BEGIN(0, 0, 0, 0);
2124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2125 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2127 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2128 IEM_GET_MODRM_RM(pVCpu, bRm));
2129 IEM_MC_ADVANCE_RIP_AND_FINISH();
2130 IEM_MC_END();
2131 }
2132 else
2133 {
2134 /*
2135 * XMM128, [mem128].
2136 */
2137 IEM_MC_BEGIN(0, 2, 0, 0);
2138 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2140
2141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2143 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2144 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2145
2146 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2147 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2148
2149 IEM_MC_ADVANCE_RIP_AND_FINISH();
2150 IEM_MC_END();
2151 }
2152
2153}
2154
2155
2156/**
2157 * @opcode 0x10
2158 * @oppfx 0x66
2159 * @opcpuid sse2
2160 * @opgroup og_sse2_pcksclr_datamove
2161 * @opxcpttype 4UA
2162 * @optest op1=1 op2=2 -> op1=2
2163 * @optest op1=0 op2=-42 -> op1=-42
2164 */
2165FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2166{
2167 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2169 if (IEM_IS_MODRM_REG_MODE(bRm))
2170 {
2171 /*
2172 * XMM128, XMM128.
2173 */
2174 IEM_MC_BEGIN(0, 0, 0, 0);
2175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2176 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2178 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2179 IEM_GET_MODRM_RM(pVCpu, bRm));
2180 IEM_MC_ADVANCE_RIP_AND_FINISH();
2181 IEM_MC_END();
2182 }
2183 else
2184 {
2185 /*
2186 * XMM128, [mem128].
2187 */
2188 IEM_MC_BEGIN(0, 2, 0, 0);
2189 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2191
2192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2194 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2195 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2196
2197 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2198 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2199
2200 IEM_MC_ADVANCE_RIP_AND_FINISH();
2201 IEM_MC_END();
2202 }
2203}
2204
2205
2206/**
2207 * @opcode 0x10
2208 * @oppfx 0xf3
2209 * @opcpuid sse
2210 * @opgroup og_sse_simdfp_datamove
2211 * @opxcpttype 5
2212 * @optest op1=1 op2=2 -> op1=2
2213 * @optest op1=0 op2=-22 -> op1=-22
2214 */
2215FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2216{
2217 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2219 if (IEM_IS_MODRM_REG_MODE(bRm))
2220 {
2221 /*
2222 * XMM32, XMM32.
2223 */
2224 IEM_MC_BEGIN(0, 1, 0, 0);
2225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2226 IEM_MC_LOCAL(uint32_t, uSrc);
2227
2228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2230 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2231 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2232
2233 IEM_MC_ADVANCE_RIP_AND_FINISH();
2234 IEM_MC_END();
2235 }
2236 else
2237 {
2238 /*
2239 * XMM128, [mem32].
2240 */
2241 IEM_MC_BEGIN(0, 2, 0, 0);
2242 IEM_MC_LOCAL(uint32_t, uSrc);
2243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2244
2245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2248 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2249
2250 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2251 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2252
2253 IEM_MC_ADVANCE_RIP_AND_FINISH();
2254 IEM_MC_END();
2255 }
2256}
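
/* Reference semantics sketch for the two MOVSS load forms above (illustrative
 * only, variable names are made up):
 *
 *     if (fRegForm)
 *         uDst.au32[0] = uSrc.au32[0];    // bits 127:32 of the dst preserved
 *     else
 *     {
 *         uDst.au32[0] = u32Mem;          // memory form zero-extends ...
 *         uDst.au32[1] = uDst.au32[2] = uDst.au32[3] = 0; // ... to 128 bits
 *     }
 */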
2257
2258
2259/**
2260 * @opcode 0x10
2261 * @oppfx 0xf2
2262 * @opcpuid sse2
2263 * @opgroup og_sse2_pcksclr_datamove
2264 * @opxcpttype 5
2265 * @optest op1=1 op2=2 -> op1=2
2266 * @optest op1=0 op2=-42 -> op1=-42
2267 */
2268FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2269{
2270 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2272 if (IEM_IS_MODRM_REG_MODE(bRm))
2273 {
2274 /*
2275 * XMM64, XMM64.
2276 */
2277 IEM_MC_BEGIN(0, 1, 0, 0);
2278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2279 IEM_MC_LOCAL(uint64_t, uSrc);
2280
2281 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2282 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2283 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2284 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2285
2286 IEM_MC_ADVANCE_RIP_AND_FINISH();
2287 IEM_MC_END();
2288 }
2289 else
2290 {
2291 /*
2292 * XMM128, [mem64].
2293 */
2294 IEM_MC_BEGIN(0, 2, 0, 0);
2295 IEM_MC_LOCAL(uint64_t, uSrc);
2296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2297
2298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2300 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2301 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2302
2303 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2304 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2305
2306 IEM_MC_ADVANCE_RIP_AND_FINISH();
2307 IEM_MC_END();
2308 }
2309}
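
/* MOVSD (F2) mirrors the MOVSS pattern above with a 64-bit element: the
   register form leaves bits 127:64 of the destination untouched, while the
   memory form zero-extends the loaded qword to the full 128 bits. */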
2310
2311
2312/**
2313 * @opcode 0x11
2314 * @oppfx none
2315 * @opcpuid sse
2316 * @opgroup og_sse_simdfp_datamove
2317 * @opxcpttype 4UA
2318 * @optest op1=1 op2=2 -> op1=2
2319 * @optest op1=0 op2=-42 -> op1=-42
2320 */
2321FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2322{
2323 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2325 if (IEM_IS_MODRM_REG_MODE(bRm))
2326 {
2327 /*
2328 * XMM128, XMM128.
2329 */
2330 IEM_MC_BEGIN(0, 0, 0, 0);
2331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2332 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2333 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2334 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2335 IEM_GET_MODRM_REG(pVCpu, bRm));
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 /*
2342 * [mem128], XMM128.
2343 */
2344 IEM_MC_BEGIN(0, 2, 0, 0);
2345 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2347
2348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2350 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2351 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2352
2353 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2354 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2355
2356 IEM_MC_ADVANCE_RIP_AND_FINISH();
2357 IEM_MC_END();
2358 }
2359}
2360
2361
2362/**
2363 * @opcode 0x11
2364 * @oppfx 0x66
2365 * @opcpuid sse2
2366 * @opgroup og_sse2_pcksclr_datamove
2367 * @opxcpttype 4UA
2368 * @optest op1=1 op2=2 -> op1=2
2369 * @optest op1=0 op2=-42 -> op1=-42
2370 */
2371FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2372{
2373 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2375 if (IEM_IS_MODRM_REG_MODE(bRm))
2376 {
2377 /*
2378 * XMM128, XMM128.
2379 */
2380 IEM_MC_BEGIN(0, 0, 0, 0);
2381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2382 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2384 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2385 IEM_GET_MODRM_REG(pVCpu, bRm));
2386 IEM_MC_ADVANCE_RIP_AND_FINISH();
2387 IEM_MC_END();
2388 }
2389 else
2390 {
2391 /*
2392 * [mem128], XMM128.
2393 */
2394 IEM_MC_BEGIN(0, 2, 0, 0);
2395 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2397
2398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2400 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2401 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2402
2403 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2404 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2405
2406 IEM_MC_ADVANCE_RIP_AND_FINISH();
2407 IEM_MC_END();
2408 }
2409}
2410
2411
2412/**
2413 * @opcode 0x11
2414 * @oppfx 0xf3
2415 * @opcpuid sse
2416 * @opgroup og_sse_simdfp_datamove
2417 * @opxcpttype 5
2418 * @optest op1=1 op2=2 -> op1=2
2419 * @optest op1=0 op2=-22 -> op1=-22
2420 */
2421FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2422{
2423 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2425 if (IEM_IS_MODRM_REG_MODE(bRm))
2426 {
2427 /*
2428 * XMM32, XMM32.
2429 */
2430 IEM_MC_BEGIN(0, 1, 0, 0);
2431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2432 IEM_MC_LOCAL(uint32_t, uSrc);
2433
2434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2436 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2437 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2438
2439 IEM_MC_ADVANCE_RIP_AND_FINISH();
2440 IEM_MC_END();
2441 }
2442 else
2443 {
2444 /*
2445 * [mem32], XMM32.
2446 */
2447 IEM_MC_BEGIN(0, 2, 0, 0);
2448 IEM_MC_LOCAL(uint32_t, uSrc);
2449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2450
2451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2453 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2454 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2455
2456 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2457 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2458
2459 IEM_MC_ADVANCE_RIP_AND_FINISH();
2460 IEM_MC_END();
2461 }
2462}
2463
2464
2465/**
2466 * @opcode 0x11
2467 * @oppfx 0xf2
2468 * @opcpuid sse2
2469 * @opgroup og_sse2_pcksclr_datamove
2470 * @opxcpttype 5
2471 * @optest op1=1 op2=2 -> op1=2
2472 * @optest op1=0 op2=-42 -> op1=-42
2473 */
2474FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2475{
2476 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2477 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2478 if (IEM_IS_MODRM_REG_MODE(bRm))
2479 {
2480 /*
2481 * XMM64, XMM64.
2482 */
2483 IEM_MC_BEGIN(0, 1, 0, 0);
2484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2485 IEM_MC_LOCAL(uint64_t, uSrc);
2486
2487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2489 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2490 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2491
2492 IEM_MC_ADVANCE_RIP_AND_FINISH();
2493 IEM_MC_END();
2494 }
2495 else
2496 {
2497 /*
2498 * [mem64], XMM64.
2499 */
2500 IEM_MC_BEGIN(0, 2, 0, 0);
2501 IEM_MC_LOCAL(uint64_t, uSrc);
2502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2503
2504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2508
2509 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2510 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2511
2512 IEM_MC_ADVANCE_RIP_AND_FINISH();
2513 IEM_MC_END();
2514 }
2515}
2516
2517
2518FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2519{
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if (IEM_IS_MODRM_REG_MODE(bRm))
2522 {
2523 /**
2524 * @opcode 0x12
2525 * @opcodesub 11 mr/reg
2526 * @oppfx none
2527 * @opcpuid sse
2528 * @opgroup og_sse_simdfp_datamove
2529 * @opxcpttype 5
2530 * @optest op1=1 op2=2 -> op1=2
2531 * @optest op1=0 op2=-42 -> op1=-42
2532 */
2533 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2534
2535 IEM_MC_BEGIN(0, 1, 0, 0);
2536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2537 IEM_MC_LOCAL(uint64_t, uSrc);
2538
2539 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2540 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2541 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2542 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2543
2544 IEM_MC_ADVANCE_RIP_AND_FINISH();
2545 IEM_MC_END();
2546 }
2547 else
2548 {
2549 /**
2550 * @opdone
2551 * @opcode 0x12
2552 * @opcodesub !11 mr/reg
2553 * @oppfx none
2554 * @opcpuid sse
2555 * @opgroup og_sse_simdfp_datamove
2556 * @opxcpttype 5
2557 * @optest op1=1 op2=2 -> op1=2
2558 * @optest op1=0 op2=-42 -> op1=-42
 2559 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2560 */
2561 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2562
2563 IEM_MC_BEGIN(0, 2, 0, 0);
2564 IEM_MC_LOCAL(uint64_t, uSrc);
2565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2566
2567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2569 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2570 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2571
2572 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2573 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2574
2575 IEM_MC_ADVANCE_RIP_AND_FINISH();
2576 IEM_MC_END();
2577 }
2578}
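
/* Illustrative summary of the two forms decoded above:
 *     movhlps xmm1, xmm2    ; xmm1[63:0] = xmm2[127:64]  (register form)
 *     movlps  xmm1, [mem64] ; xmm1[63:0] = [mem64]       (memory form)
 * Bits 127:64 of xmm1 are left unchanged in both cases. */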
2579
2580
2581/**
2582 * @opcode 0x12
2583 * @opcodesub !11 mr/reg
2584 * @oppfx 0x66
2585 * @opcpuid sse2
2586 * @opgroup og_sse2_pcksclr_datamove
2587 * @opxcpttype 5
2588 * @optest op1=1 op2=2 -> op1=2
2589 * @optest op1=0 op2=-42 -> op1=-42
2590 */
2591FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2592{
2593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2594 if (IEM_IS_MODRM_MEM_MODE(bRm))
2595 {
2596 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2597
2598 IEM_MC_BEGIN(0, 2, 0, 0);
2599 IEM_MC_LOCAL(uint64_t, uSrc);
2600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2601
2602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2604 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2605 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2606
2607 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2608 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2609
2610 IEM_MC_ADVANCE_RIP_AND_FINISH();
2611 IEM_MC_END();
2612 }
2613
2614 /**
2615 * @opdone
2616 * @opmnemonic ud660f12m3
2617 * @opcode 0x12
2618 * @opcodesub 11 mr/reg
2619 * @oppfx 0x66
2620 * @opunused immediate
2621 * @opcpuid sse
2622 * @optest ->
2623 */
2624 else
2625 IEMOP_RAISE_INVALID_OPCODE_RET();
2626}
2627
2628
2629/**
2630 * @opcode 0x12
2631 * @oppfx 0xf3
2632 * @opcpuid sse3
2633 * @opgroup og_sse3_pcksclr_datamove
2634 * @opxcpttype 4
2635 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2636 * op1=0x00000002000000020000000100000001
2637 */
2638FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2639{
2640 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2642 if (IEM_IS_MODRM_REG_MODE(bRm))
2643 {
2644 /*
2645 * XMM, XMM.
2646 */
2647 IEM_MC_BEGIN(0, 1, 0, 0);
2648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2649 IEM_MC_LOCAL(RTUINT128U, uSrc);
2650
2651 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2652 IEM_MC_PREPARE_SSE_USAGE();
2653
2654 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2655 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2656 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2657 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2658 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2659
2660 IEM_MC_ADVANCE_RIP_AND_FINISH();
2661 IEM_MC_END();
2662 }
2663 else
2664 {
2665 /*
2666 * XMM, [mem128].
2667 */
2668 IEM_MC_BEGIN(0, 2, 0, 0);
2669 IEM_MC_LOCAL(RTUINT128U, uSrc);
2670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2671
2672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2674 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2675 IEM_MC_PREPARE_SSE_USAGE();
2676
2677 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2678 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2679 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2680 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2681 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2682
2683 IEM_MC_ADVANCE_RIP_AND_FINISH();
2684 IEM_MC_END();
2685 }
2686}
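
/* Reference semantics sketch for MOVSLDUP (illustrative only): the even
 * dwords of the source are duplicated into each dword pair, matching the
 * four IEM_MC_STORE_XREG_U32_U128 invocations above:
 *
 *     uDst.au32[0] = uDst.au32[1] = uSrc.au32[0];
 *     uDst.au32[2] = uDst.au32[3] = uSrc.au32[2];
 */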
2687
2688
2689/**
2690 * @opcode 0x12
2691 * @oppfx 0xf2
2692 * @opcpuid sse3
2693 * @opgroup og_sse3_pcksclr_datamove
2694 * @opxcpttype 5
2695 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2696 * op1=0x22222222111111112222222211111111
2697 */
2698FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2699{
2700 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2702 if (IEM_IS_MODRM_REG_MODE(bRm))
2703 {
2704 /*
2705 * XMM128, XMM64.
2706 */
2707 IEM_MC_BEGIN(0, 1, 0, 0);
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2709 IEM_MC_LOCAL(uint64_t, uSrc);
2710
2711 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2712 IEM_MC_PREPARE_SSE_USAGE();
2713
2714 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2715 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2716 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2717
2718 IEM_MC_ADVANCE_RIP_AND_FINISH();
2719 IEM_MC_END();
2720 }
2721 else
2722 {
2723 /*
2724 * XMM128, [mem64].
2725 */
2726 IEM_MC_BEGIN(0, 2, 0, 0);
2727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2728 IEM_MC_LOCAL(uint64_t, uSrc);
2729
2730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2732 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2733 IEM_MC_PREPARE_SSE_USAGE();
2734
2735 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2736 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2737 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2738
2739 IEM_MC_ADVANCE_RIP_AND_FINISH();
2740 IEM_MC_END();
2741 }
2742}
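
/* Reference semantics sketch for MOVDDUP (illustrative only):
 *
 *     uDst.au64[0] = uDst.au64[1] = uSrc.au64[0];
 *
 * which is why both paths above fetch only a single qword and store it
 * twice. */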
2743
2744
2745/**
2746 * @opcode 0x13
2747 * @opcodesub !11 mr/reg
2748 * @oppfx none
2749 * @opcpuid sse
2750 * @opgroup og_sse_simdfp_datamove
2751 * @opxcpttype 5
2752 * @optest op1=1 op2=2 -> op1=2
2753 * @optest op1=0 op2=-42 -> op1=-42
2754 */
2755FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2756{
2757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2758 if (IEM_IS_MODRM_MEM_MODE(bRm))
2759 {
2760 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2761
2762 IEM_MC_BEGIN(0, 2, 0, 0);
2763 IEM_MC_LOCAL(uint64_t, uSrc);
2764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2765
2766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2768 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2769 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2770
2771 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2772 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2773
2774 IEM_MC_ADVANCE_RIP_AND_FINISH();
2775 IEM_MC_END();
2776 }
2777
2778 /**
2779 * @opdone
2780 * @opmnemonic ud0f13m3
2781 * @opcode 0x13
2782 * @opcodesub 11 mr/reg
2783 * @oppfx none
2784 * @opunused immediate
2785 * @opcpuid sse
2786 * @optest ->
2787 */
2788 else
2789 IEMOP_RAISE_INVALID_OPCODE_RET();
2790}
2791
2792
2793/**
2794 * @opcode 0x13
2795 * @opcodesub !11 mr/reg
2796 * @oppfx 0x66
2797 * @opcpuid sse2
2798 * @opgroup og_sse2_pcksclr_datamove
2799 * @opxcpttype 5
2800 * @optest op1=1 op2=2 -> op1=2
2801 * @optest op1=0 op2=-42 -> op1=-42
2802 */
2803FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2804{
2805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2806 if (IEM_IS_MODRM_MEM_MODE(bRm))
2807 {
2808 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2809
2810 IEM_MC_BEGIN(0, 2, 0, 0);
2811 IEM_MC_LOCAL(uint64_t, uSrc);
2812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2813
2814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2816 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2817 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2818
2819 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2820 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2821
2822 IEM_MC_ADVANCE_RIP_AND_FINISH();
2823 IEM_MC_END();
2824 }
2825
2826 /**
2827 * @opdone
2828 * @opmnemonic ud660f13m3
2829 * @opcode 0x13
2830 * @opcodesub 11 mr/reg
2831 * @oppfx 0x66
2832 * @opunused immediate
2833 * @opcpuid sse
2834 * @optest ->
2835 */
2836 else
2837 IEMOP_RAISE_INVALID_OPCODE_RET();
2838}
2839
2840
2841/**
2842 * @opmnemonic udf30f13
2843 * @opcode 0x13
2844 * @oppfx 0xf3
2845 * @opunused intel-modrm
2846 * @opcpuid sse
2847 * @optest ->
2848 * @opdone
2849 */
2850
2851/**
2852 * @opmnemonic udf20f13
2853 * @opcode 0x13
2854 * @oppfx 0xf2
2855 * @opunused intel-modrm
2856 * @opcpuid sse
2857 * @optest ->
2858 * @opdone
2859 */
2860
 2861/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2862FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2863{
2864 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2865 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2866}
2867
2868
2869/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2870FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2871{
2872 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2873 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2874}
2875
2876
2877/**
2878 * @opdone
2879 * @opmnemonic udf30f14
2880 * @opcode 0x14
2881 * @oppfx 0xf3
2882 * @opunused intel-modrm
2883 * @opcpuid sse
2884 * @optest ->
2885 * @opdone
2886 */
2887
2888/**
2889 * @opmnemonic udf20f14
2890 * @opcode 0x14
2891 * @oppfx 0xf2
2892 * @opunused intel-modrm
2893 * @opcpuid sse
2894 * @optest ->
2895 * @opdone
2896 */
2897
2898/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2899FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2900{
2901 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2902 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2903}
2904
2905
2906/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2907FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2908{
2909 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2910 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2911}
2912
2913
2914/* Opcode 0xf3 0x0f 0x15 - invalid */
2915/* Opcode 0xf2 0x0f 0x15 - invalid */
2916
2917/**
2918 * @opdone
2919 * @opmnemonic udf30f15
2920 * @opcode 0x15
2921 * @oppfx 0xf3
2922 * @opunused intel-modrm
2923 * @opcpuid sse
2924 * @optest ->
2925 * @opdone
2926 */
2927
2928/**
2929 * @opmnemonic udf20f15
2930 * @opcode 0x15
2931 * @oppfx 0xf2
2932 * @opunused intel-modrm
2933 * @opcpuid sse
2934 * @optest ->
2935 * @opdone
2936 */
2937
2938FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2939{
2940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2941 if (IEM_IS_MODRM_REG_MODE(bRm))
2942 {
2943 /**
2944 * @opcode 0x16
2945 * @opcodesub 11 mr/reg
2946 * @oppfx none
2947 * @opcpuid sse
2948 * @opgroup og_sse_simdfp_datamove
2949 * @opxcpttype 5
2950 * @optest op1=1 op2=2 -> op1=2
2951 * @optest op1=0 op2=-42 -> op1=-42
2952 */
2953 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2954
2955 IEM_MC_BEGIN(0, 1, 0, 0);
2956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2957 IEM_MC_LOCAL(uint64_t, uSrc);
2958
2959 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2960 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2961 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2962 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2963
2964 IEM_MC_ADVANCE_RIP_AND_FINISH();
2965 IEM_MC_END();
2966 }
2967 else
2968 {
2969 /**
2970 * @opdone
2971 * @opcode 0x16
2972 * @opcodesub !11 mr/reg
2973 * @oppfx none
2974 * @opcpuid sse
2975 * @opgroup og_sse_simdfp_datamove
2976 * @opxcpttype 5
2977 * @optest op1=1 op2=2 -> op1=2
2978 * @optest op1=0 op2=-42 -> op1=-42
2979 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2980 */
2981 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2982
2983 IEM_MC_BEGIN(0, 2, 0, 0);
2984 IEM_MC_LOCAL(uint64_t, uSrc);
2985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2986
2987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2989 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2990 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2991
2992 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2993 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2994
2995 IEM_MC_ADVANCE_RIP_AND_FINISH();
2996 IEM_MC_END();
2997 }
2998}
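
/* Illustrative summary of the two forms decoded above:
 *     movlhps xmm1, xmm2    ; xmm1[127:64] = xmm2[63:0]  (register form)
 *     movhps  xmm1, [mem64] ; xmm1[127:64] = [mem64]     (memory form)
 * The low qword of xmm1 is left unchanged in both cases. */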
2999
3000
3001/**
3002 * @opcode 0x16
3003 * @opcodesub !11 mr/reg
3004 * @oppfx 0x66
3005 * @opcpuid sse2
3006 * @opgroup og_sse2_pcksclr_datamove
3007 * @opxcpttype 5
3008 * @optest op1=1 op2=2 -> op1=2
3009 * @optest op1=0 op2=-42 -> op1=-42
3010 */
3011FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3012{
3013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3014 if (IEM_IS_MODRM_MEM_MODE(bRm))
3015 {
3016 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3017
3018 IEM_MC_BEGIN(0, 2, 0, 0);
3019 IEM_MC_LOCAL(uint64_t, uSrc);
3020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3021
3022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3024 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3025 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3026
3027 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3028 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3029
3030 IEM_MC_ADVANCE_RIP_AND_FINISH();
3031 IEM_MC_END();
3032 }
3033
3034 /**
3035 * @opdone
3036 * @opmnemonic ud660f16m3
3037 * @opcode 0x16
3038 * @opcodesub 11 mr/reg
3039 * @oppfx 0x66
3040 * @opunused immediate
3041 * @opcpuid sse
3042 * @optest ->
3043 */
3044 else
3045 IEMOP_RAISE_INVALID_OPCODE_RET();
3046}
3047
3048
3049/**
3050 * @opcode 0x16
3051 * @oppfx 0xf3
3052 * @opcpuid sse3
3053 * @opgroup og_sse3_pcksclr_datamove
3054 * @opxcpttype 4
3055 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3056 * op1=0x00000002000000020000000100000001
3057 */
3058FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3059{
3060 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3062 if (IEM_IS_MODRM_REG_MODE(bRm))
3063 {
3064 /*
3065 * XMM128, XMM128.
3066 */
3067 IEM_MC_BEGIN(0, 1, 0, 0);
3068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3069 IEM_MC_LOCAL(RTUINT128U, uSrc);
3070
3071 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3072 IEM_MC_PREPARE_SSE_USAGE();
3073
3074 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3075 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3076 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3077 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3078 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3079
3080 IEM_MC_ADVANCE_RIP_AND_FINISH();
3081 IEM_MC_END();
3082 }
3083 else
3084 {
3085 /*
3086 * XMM128, [mem128].
3087 */
3088 IEM_MC_BEGIN(0, 2, 0, 0);
3089 IEM_MC_LOCAL(RTUINT128U, uSrc);
3090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3091
3092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3094 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3095 IEM_MC_PREPARE_SSE_USAGE();
3096
3097 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3098 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3099 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3100 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3101 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3102
3103 IEM_MC_ADVANCE_RIP_AND_FINISH();
3104 IEM_MC_END();
3105 }
3106}
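
/* Reference semantics sketch for MOVSHDUP (illustrative only), the odd-dword
 * counterpart of MOVSLDUP above:
 *
 *     uDst.au32[0] = uDst.au32[1] = uSrc.au32[1];
 *     uDst.au32[2] = uDst.au32[3] = uSrc.au32[3];
 */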
3107
3108/**
3109 * @opdone
 3110 * @opmnemonic udf20f16
3111 * @opcode 0x16
3112 * @oppfx 0xf2
3113 * @opunused intel-modrm
3114 * @opcpuid sse
3115 * @optest ->
3116 * @opdone
3117 */
3118
3119
3120/**
3121 * @opcode 0x17
3122 * @opcodesub !11 mr/reg
3123 * @oppfx none
3124 * @opcpuid sse
3125 * @opgroup og_sse_simdfp_datamove
3126 * @opxcpttype 5
3127 * @optest op1=1 op2=2 -> op1=2
3128 * @optest op1=0 op2=-42 -> op1=-42
3129 */
3130FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3131{
3132 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3133 if (IEM_IS_MODRM_MEM_MODE(bRm))
3134 {
3135 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3136
3137 IEM_MC_BEGIN(0, 2, 0, 0);
3138 IEM_MC_LOCAL(uint64_t, uSrc);
3139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3140
3141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3143 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3144 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3145
3146 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3147 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3148
3149 IEM_MC_ADVANCE_RIP_AND_FINISH();
3150 IEM_MC_END();
3151 }
3152
3153 /**
3154 * @opdone
3155 * @opmnemonic ud0f17m3
3156 * @opcode 0x17
3157 * @opcodesub 11 mr/reg
3158 * @oppfx none
3159 * @opunused immediate
3160 * @opcpuid sse
3161 * @optest ->
3162 */
3163 else
3164 IEMOP_RAISE_INVALID_OPCODE_RET();
3165}
3166
3167
3168/**
3169 * @opcode 0x17
3170 * @opcodesub !11 mr/reg
3171 * @oppfx 0x66
3172 * @opcpuid sse2
3173 * @opgroup og_sse2_pcksclr_datamove
3174 * @opxcpttype 5
3175 * @optest op1=1 op2=2 -> op1=2
3176 * @optest op1=0 op2=-42 -> op1=-42
3177 */
3178FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3179{
3180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3181 if (IEM_IS_MODRM_MEM_MODE(bRm))
3182 {
3183 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3184
3185 IEM_MC_BEGIN(0, 2, 0, 0);
3186 IEM_MC_LOCAL(uint64_t, uSrc);
3187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3188
3189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
 3190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3191 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3192 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3193
3194 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3195 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3196
3197 IEM_MC_ADVANCE_RIP_AND_FINISH();
3198 IEM_MC_END();
3199 }
3200
3201 /**
3202 * @opdone
3203 * @opmnemonic ud660f17m3
3204 * @opcode 0x17
3205 * @opcodesub 11 mr/reg
3206 * @oppfx 0x66
3207 * @opunused immediate
3208 * @opcpuid sse
3209 * @optest ->
3210 */
3211 else
3212 IEMOP_RAISE_INVALID_OPCODE_RET();
3213}
3214
3215
3216/**
3217 * @opdone
3218 * @opmnemonic udf30f17
3219 * @opcode 0x17
3220 * @oppfx 0xf3
3221 * @opunused intel-modrm
3222 * @opcpuid sse
3223 * @optest ->
3224 * @opdone
3225 */
3226
3227/**
3228 * @opmnemonic udf20f17
3229 * @opcode 0x17
3230 * @oppfx 0xf2
3231 * @opunused intel-modrm
3232 * @opcpuid sse
3233 * @optest ->
3234 * @opdone
3235 */
3236
3237
3238/** Opcode 0x0f 0x18. */
3239FNIEMOP_DEF(iemOp_prefetch_Grp16)
3240{
3241 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3242 if (IEM_IS_MODRM_MEM_MODE(bRm))
3243 {
3244 switch (IEM_GET_MODRM_REG_8(bRm))
3245 {
3246 case 4: /* Aliased to /0 for the time being according to AMD. */
3247 case 5: /* Aliased to /0 for the time being according to AMD. */
3248 case 6: /* Aliased to /0 for the time being according to AMD. */
3249 case 7: /* Aliased to /0 for the time being according to AMD. */
3250 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3251 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3252 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3253 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3255 }
3256
3257 IEM_MC_BEGIN(0, 1, 0, 0);
3258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 /* Currently a NOP. */
3262 IEM_MC_NOREF(GCPtrEffSrc);
3263 IEM_MC_ADVANCE_RIP_AND_FINISH();
3264 IEM_MC_END();
3265 }
3266 else
3267 IEMOP_RAISE_INVALID_OPCODE_RET();
3268}
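
/* Note (illustrative): e.g. 0x0f 0x18 0x08 decodes as "prefetcht0 [eax]" in
   32-bit code (mod=00b, reg=001b, rm=000b).  The prefetch hints may
   architecturally be ignored, so decoding the effective address and doing
   nothing is a conforming implementation. */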
3269
3270
3271/** Opcode 0x0f 0x19..0x1f. */
3272FNIEMOP_DEF(iemOp_nop_Ev)
3273{
3274 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3276 if (IEM_IS_MODRM_REG_MODE(bRm))
3277 {
3278 IEM_MC_BEGIN(0, 0, 0, 0);
3279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3280 IEM_MC_ADVANCE_RIP_AND_FINISH();
3281 IEM_MC_END();
3282 }
3283 else
3284 {
3285 IEM_MC_BEGIN(0, 1, 0, 0);
3286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3289 /* Currently a NOP. */
3290 IEM_MC_NOREF(GCPtrEffSrc);
3291 IEM_MC_ADVANCE_RIP_AND_FINISH();
3292 IEM_MC_END();
3293 }
3294}
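
/* Note (illustrative): 0x0f 0x1f /0 is the recommended multi-byte NOP on
   both Intel and AMD, e.g. the 6-byte form 66 0f 1f 44 00 00.  All of
   0x0f 0x19..0x1f funnel into this handler, which consumes the ModR/M
   operand without touching anything. */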
3295
3296
3297/** Opcode 0x0f 0x20. */
3298FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3299{
 3300 /* mod is ignored, as are operand size overrides. */
3301/** @todo testcase: check memory encoding. */
3302 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3303 IEMOP_HLP_MIN_386();
3304 if (IEM_IS_64BIT_CODE(pVCpu))
3305 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3306 else
3307 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3308
3309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3310 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3311 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3312 {
3313 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3314 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3315 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3316 iCrReg |= 8;
3317 }
3318 switch (iCrReg)
3319 {
3320 case 0: case 2: case 3: case 4: case 8:
3321 break;
3322 default:
3323 IEMOP_RAISE_INVALID_OPCODE_RET();
3324 }
3325 IEMOP_HLP_DONE_DECODING();
3326
3327 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3328 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3329 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3330}
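
/* Illustrative: on CPUs with the alternative CR8 encoding (fMovCr8In32Bit,
   AMD's AltMovCr8 feature), the LOCK prefix selects CR8 outside 64-bit mode,
   so "f0 0f 20 c0" reads CR8 into EAX where the unprefixed form reads CR0. */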
3331
3332
3333/** Opcode 0x0f 0x21. */
3334FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3335{
3336/** @todo testcase: check memory encoding. */
3337 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3338 IEMOP_HLP_MIN_386();
3339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3341 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3342 IEMOP_RAISE_INVALID_OPCODE_RET();
3343 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3344 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3345 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3346}
3347
3348
3349/** Opcode 0x0f 0x22. */
3350FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3351{
 3352 /* mod is ignored, as are operand size overrides. */
3353 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3354 IEMOP_HLP_MIN_386();
3355 if (IEM_IS_64BIT_CODE(pVCpu))
3356 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3357 else
3358 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3359
3360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3361 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3362 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3363 {
3364 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3365 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3366 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3367 iCrReg |= 8;
3368 }
3369 switch (iCrReg)
3370 {
3371 case 0: case 2: case 3: case 4: case 8:
3372 break;
3373 default:
3374 IEMOP_RAISE_INVALID_OPCODE_RET();
3375 }
3376 IEMOP_HLP_DONE_DECODING();
3377
 3378 if (iCrReg & (2 | 8)) /* CR2, CR3 and CR8 loads never change the CPU mode. */
3379 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3380 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3381 else
3382 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3383 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3384}
3385
3386
3387/** Opcode 0x0f 0x23. */
3388FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3389{
3390 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3391 IEMOP_HLP_MIN_386();
3392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3394 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3395 IEMOP_RAISE_INVALID_OPCODE_RET();
3396 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3397 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3398}
3399
3400
3401/** Opcode 0x0f 0x24. */
3402FNIEMOP_DEF(iemOp_mov_Rd_Td)
3403{
3404 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3405 IEMOP_HLP_MIN_386();
3406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3408 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3409 IEMOP_RAISE_INVALID_OPCODE_RET();
3410 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3411 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3412}
3413
3414
3415/** Opcode 0x0f 0x26. */
3416FNIEMOP_DEF(iemOp_mov_Td_Rd)
3417{
3418 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3419 IEMOP_HLP_MIN_386();
3420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3422 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3423 IEMOP_RAISE_INVALID_OPCODE_RET();
3424 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3425}
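
/* Note (illustrative): the test registers (TR6/TR7 and friends) only exist
   on the 386/486; from the Pentium onwards opcodes 0x0f 0x24 and 0x0f 0x26
   raise #UD, which is exactly what the IEMTARGETCPU_PENTIUM checks above
   implement. */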
3426
3427
3428/**
3429 * @opcode 0x28
3430 * @oppfx none
3431 * @opcpuid sse
3432 * @opgroup og_sse_simdfp_datamove
3433 * @opxcpttype 1
3434 * @optest op1=1 op2=2 -> op1=2
3435 * @optest op1=0 op2=-42 -> op1=-42
3436 */
3437FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3438{
3439 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3441 if (IEM_IS_MODRM_REG_MODE(bRm))
3442 {
3443 /*
3444 * Register, register.
3445 */
3446 IEM_MC_BEGIN(0, 0, 0, 0);
3447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3448 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3449 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3450 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3451 IEM_GET_MODRM_RM(pVCpu, bRm));
3452 IEM_MC_ADVANCE_RIP_AND_FINISH();
3453 IEM_MC_END();
3454 }
3455 else
3456 {
3457 /*
3458 * Register, memory.
3459 */
3460 IEM_MC_BEGIN(0, 2, 0, 0);
3461 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3463
3464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3466 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3467 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3468
3469 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3470 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3471
3472 IEM_MC_ADVANCE_RIP_AND_FINISH();
3473 IEM_MC_END();
3474 }
3475}
3476
3477/**
3478 * @opcode 0x28
3479 * @oppfx 66
3480 * @opcpuid sse2
3481 * @opgroup og_sse2_pcksclr_datamove
3482 * @opxcpttype 1
3483 * @optest op1=1 op2=2 -> op1=2
3484 * @optest op1=0 op2=-42 -> op1=-42
3485 */
3486FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3487{
3488 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3490 if (IEM_IS_MODRM_REG_MODE(bRm))
3491 {
3492 /*
3493 * Register, register.
3494 */
3495 IEM_MC_BEGIN(0, 0, 0, 0);
3496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3498 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3499 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3500 IEM_GET_MODRM_RM(pVCpu, bRm));
3501 IEM_MC_ADVANCE_RIP_AND_FINISH();
3502 IEM_MC_END();
3503 }
3504 else
3505 {
3506 /*
3507 * Register, memory.
3508 */
3509 IEM_MC_BEGIN(0, 2, 0, 0);
3510 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3512
3513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3515 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3516 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3517
3518 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3519 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3520
3521 IEM_MC_ADVANCE_RIP_AND_FINISH();
3522 IEM_MC_END();
3523 }
3524}
3525
3526/* Opcode 0xf3 0x0f 0x28 - invalid */
3527/* Opcode 0xf2 0x0f 0x28 - invalid */
3528
3529/**
3530 * @opcode 0x29
3531 * @oppfx none
3532 * @opcpuid sse
3533 * @opgroup og_sse_simdfp_datamove
3534 * @opxcpttype 1
3535 * @optest op1=1 op2=2 -> op1=2
3536 * @optest op1=0 op2=-42 -> op1=-42
3537 */
3538FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3539{
3540 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3542 if (IEM_IS_MODRM_REG_MODE(bRm))
3543 {
3544 /*
3545 * Register, register.
3546 */
3547 IEM_MC_BEGIN(0, 0, 0, 0);
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3549 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3550 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3551 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3552 IEM_GET_MODRM_REG(pVCpu, bRm));
3553 IEM_MC_ADVANCE_RIP_AND_FINISH();
3554 IEM_MC_END();
3555 }
3556 else
3557 {
3558 /*
3559 * Memory, register.
3560 */
3561 IEM_MC_BEGIN(0, 2, 0, 0);
3562 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3564
3565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3568 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3569
3570 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3571 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3572
3573 IEM_MC_ADVANCE_RIP_AND_FINISH();
3574 IEM_MC_END();
3575 }
3576}
3577
3578/**
3579 * @opcode 0x29
3580 * @oppfx 66
3581 * @opcpuid sse2
3582 * @opgroup og_sse2_pcksclr_datamove
3583 * @opxcpttype 1
3584 * @optest op1=1 op2=2 -> op1=2
3585 * @optest op1=0 op2=-42 -> op1=-42
3586 */
3587FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3588{
3589 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3591 if (IEM_IS_MODRM_REG_MODE(bRm))
3592 {
3593 /*
3594 * Register, register.
3595 */
3596 IEM_MC_BEGIN(0, 0, 0, 0);
3597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3598 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3600 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3601 IEM_GET_MODRM_REG(pVCpu, bRm));
3602 IEM_MC_ADVANCE_RIP_AND_FINISH();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 /*
3608 * Memory, register.
3609 */
3610 IEM_MC_BEGIN(0, 2, 0, 0);
3611 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3613
3614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3616 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3617 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3618
3619 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3620 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3621
3622 IEM_MC_ADVANCE_RIP_AND_FINISH();
3623 IEM_MC_END();
3624 }
3625}
3626
3627/* Opcode 0xf3 0x0f 0x29 - invalid */
3628/* Opcode 0xf2 0x0f 0x29 - invalid */
3629
3630
3631/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3632FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3633{
3634 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3636 if (IEM_IS_MODRM_REG_MODE(bRm))
3637 {
3638 /*
3639 * XMM, MMX
3640 */
3641 IEM_MC_BEGIN(3, 1, 0, 0);
3642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3643 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3644 IEM_MC_LOCAL(X86XMMREG, Dst);
3645 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3646 IEM_MC_ARG(uint64_t, u64Src, 2);
3647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3648 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3649 IEM_MC_PREPARE_FPU_USAGE();
3650 IEM_MC_FPU_TO_MMX_MODE();
3651
3652 IEM_MC_REF_MXCSR(pfMxcsr);
3653 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3654 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3655
3656 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3657 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3658 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3659 } IEM_MC_ELSE() {
3660 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3661 } IEM_MC_ENDIF();
3662
3663 IEM_MC_ADVANCE_RIP_AND_FINISH();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * XMM, [mem64]
3670 */
3671 IEM_MC_BEGIN(3, 2, 0, 0);
3672 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3673 IEM_MC_LOCAL(X86XMMREG, Dst);
3674 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3675 IEM_MC_ARG(uint64_t, u64Src, 2);
3676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3677
3678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3681 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3682 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3683
3684 IEM_MC_PREPARE_FPU_USAGE();
3685 IEM_MC_FPU_TO_MMX_MODE();
3686 IEM_MC_REF_MXCSR(pfMxcsr);
3687 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3688 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3689 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3690 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3691 } IEM_MC_ELSE() {
3692 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3693 } IEM_MC_ENDIF();
3694
3695 IEM_MC_ADVANCE_RIP_AND_FINISH();
3696 IEM_MC_END();
3697 }
3698}
3699
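/*
 * Note: CVTPI2PS converts the two packed int32 values of the MMX source
 * into two single-precision floats in the low quadword of the XMM
 * destination while preserving the high quadword, which is why Dst is
 * fetched before the call in both encodings above.  Rough sketch of the
 * conversion, with plain C floats standing in for the XMM lanes (the real
 * worker additionally honours MXCSR.RC and reports exception flags):
 *
 *      float af32Dst[4];  // lanes 2 and 3 keep their previous contents
 *      af32Dst[0] = (float)(int32_t)(u64Src & UINT32_MAX);
 *      af32Dst[1] = (float)(int32_t)(u64Src >> 32);
 */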
3700
3701/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3702FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3703{
3704 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3706 if (IEM_IS_MODRM_REG_MODE(bRm))
3707 {
3708 /*
3709 * XMM, MMX
3710 */
3711 IEM_MC_BEGIN(3, 1, 0, 0);
3712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3713 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3714 IEM_MC_LOCAL(X86XMMREG, Dst);
3715 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3716 IEM_MC_ARG(uint64_t, u64Src, 2);
3717 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3718 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3719 IEM_MC_PREPARE_FPU_USAGE();
3720 IEM_MC_FPU_TO_MMX_MODE();
3721
3722 IEM_MC_REF_MXCSR(pfMxcsr);
3723 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3724
3725 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3726 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3727 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3728 } IEM_MC_ELSE() {
3729 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3730 } IEM_MC_ENDIF();
3731
3732 IEM_MC_ADVANCE_RIP_AND_FINISH();
3733 IEM_MC_END();
3734 }
3735 else
3736 {
3737 /*
3738 * XMM, [mem64]
3739 */
3740 IEM_MC_BEGIN(3, 3, 0, 0);
3741 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3742 IEM_MC_LOCAL(X86XMMREG, Dst);
3743 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3744 IEM_MC_ARG(uint64_t, u64Src, 2);
3745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3746
3747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3749 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3750 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3751 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3752
3753 /* Doesn't cause a transition to MMX mode. */
3754 IEM_MC_PREPARE_SSE_USAGE();
3755 IEM_MC_REF_MXCSR(pfMxcsr);
3756
3757 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3758 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3759 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3760 } IEM_MC_ELSE() {
3761 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3762 } IEM_MC_ENDIF();
3763
3764 IEM_MC_ADVANCE_RIP_AND_FINISH();
3765 IEM_MC_END();
3766 }
3767}
3768
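/*
 * Note: CVTPI2PD writes both destination lanes, so unlike cvtpi2ps there is
 * no need to fetch the old XMM value first, and the memory encoding can skip
 * IEM_MC_FPU_TO_MMX_MODE entirely since no MMX register is read.  The
 * int32 -> double conversion is always exact (no rounding), roughly:
 *
 *      double af64Dst[2];
 *      af64Dst[0] = (double)(int32_t)(u64Src & UINT32_MAX);
 *      af64Dst[1] = (double)(int32_t)(u64Src >> 32);
 */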
3769
3770/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3771FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3772{
3773 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3774
3775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3776 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3777 {
3778 if (IEM_IS_MODRM_REG_MODE(bRm))
3779 {
3780 /* XMM, greg64 */
3781 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3782 IEM_MC_LOCAL(uint32_t, fMxcsr);
3783 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3784 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3785 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3786 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3787
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3789 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3790 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3791
3792 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3793 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3794 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3795 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3796 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3797 } IEM_MC_ELSE() {
3798 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3799 } IEM_MC_ENDIF();
3800
3801 IEM_MC_ADVANCE_RIP_AND_FINISH();
3802 IEM_MC_END();
3803 }
3804 else
3805 {
3806 /* XMM, [mem64] */
3807 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3809 IEM_MC_LOCAL(uint32_t, fMxcsr);
3810 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3811 IEM_MC_LOCAL(int64_t, i64Src);
3812 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3813 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3814 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3815
3816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3818 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3819 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3820
3821 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3822 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3823 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3824 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3825 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3826 } IEM_MC_ELSE() {
3827 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3828 } IEM_MC_ENDIF();
3829
3830 IEM_MC_ADVANCE_RIP_AND_FINISH();
3831 IEM_MC_END();
3832 }
3833 }
3834 else
3835 {
3836 if (IEM_IS_MODRM_REG_MODE(bRm))
3837 {
3838 /* XMM, greg32 */
3839 IEM_MC_BEGIN(3, 2, 0, 0);
3840 IEM_MC_LOCAL(uint32_t, fMxcsr);
3841 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3842 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3843 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3844 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3845
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3847 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3848 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3849
3850 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3851 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3852 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3853 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3854 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3855 } IEM_MC_ELSE() {
3856 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3857 } IEM_MC_ENDIF();
3858
3859 IEM_MC_ADVANCE_RIP_AND_FINISH();
3860 IEM_MC_END();
3861 }
3862 else
3863 {
3864 /* XMM, [mem32] */
3865 IEM_MC_BEGIN(3, 4, 0, 0);
3866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3867 IEM_MC_LOCAL(uint32_t, fMxcsr);
3868 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3869 IEM_MC_LOCAL(int32_t, i32Src);
3870 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3871 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3872 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3873
3874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3876 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3877 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3878
3879 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3880 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3881 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3882 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3883 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3884 } IEM_MC_ELSE() {
3885 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3886 } IEM_MC_ENDIF();
3887
3888 IEM_MC_ADVANCE_RIP_AND_FINISH();
3889 IEM_MC_END();
3890 }
3891 }
3892}
3893
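/*
 * Note: for CVTSI2SS (and CVTSI2SD below) REX.W selects the source width
 * (int64 vs int32); the destination lane is fixed by the instruction.
 * int32 -> float and the int64 conversions can be inexact and round
 * according to MXCSR.RC, which is why the worker hands MXCSR back through
 * pfMxcsr.  Host-side illustration of the lossy case (plain C, not the IEM
 * worker):
 *
 *      int32_t i = INT32_MAX;  // 2147483647 has no exact float representation
 *      float   r = (float)i;   // rounds to 2147483648.0f under nearest-even
 */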
3894
3895/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3896FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3897{
3898 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3899
3900 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3901 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3902 {
3903 if (IEM_IS_MODRM_REG_MODE(bRm))
3904 {
3905 /* XMM, greg64 */
3906 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3907 IEM_MC_LOCAL(uint32_t, fMxcsr);
3908 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3909 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3910 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3911 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3912
3913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3914 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3915 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3916
3917 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3918 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3919 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3920 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3921 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3922 } IEM_MC_ELSE() {
3923 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3924 } IEM_MC_ENDIF();
3925
3926 IEM_MC_ADVANCE_RIP_AND_FINISH();
3927 IEM_MC_END();
3928 }
3929 else
3930 {
3931 /* XMM, [mem64] */
3932 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3934 IEM_MC_LOCAL(uint32_t, fMxcsr);
3935 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3936 IEM_MC_LOCAL(int64_t, i64Src);
3937 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3938 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3939 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3940
3941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3944 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3945
3946 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3947 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3948 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3949 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3950 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3951 } IEM_MC_ELSE() {
3952 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3953 } IEM_MC_ENDIF();
3954
3955 IEM_MC_ADVANCE_RIP_AND_FINISH();
3956 IEM_MC_END();
3957 }
3958 }
3959 else
3960 {
3961 if (IEM_IS_MODRM_REG_MODE(bRm))
3962 {
3963 /* XMM, greg32 */
3964 IEM_MC_BEGIN(3, 2, 0, 0);
3965 IEM_MC_LOCAL(uint32_t, fMxcsr);
3966 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3967 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3968 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3969 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3970
3971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3972 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3973 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3974
3975 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3976 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3977 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3978 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3979 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3980 } IEM_MC_ELSE() {
3981 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3982 } IEM_MC_ENDIF();
3983
3984 IEM_MC_ADVANCE_RIP_AND_FINISH();
3985 IEM_MC_END();
3986 }
3987 else
3988 {
3989 /* XMM, [mem32] */
3990 IEM_MC_BEGIN(3, 4, 0, 0);
3991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3992 IEM_MC_LOCAL(uint32_t, fMxcsr);
3993 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3994 IEM_MC_LOCAL(int32_t, i32Src);
3995 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3996 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3997 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3998
3999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4001 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4002 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4003
4004 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4005 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
4006 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4007 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4008 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4009 } IEM_MC_ELSE() {
4010 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
4011 } IEM_MC_ENDIF();
4012
4013 IEM_MC_ADVANCE_RIP_AND_FINISH();
4014 IEM_MC_END();
4015 }
4016 }
4017}
4018
4019
4020/**
4021 * @opcode 0x2b
4022 * @opcodesub !11 mr/reg
4023 * @oppfx none
4024 * @opcpuid sse
4025 * @opgroup og_sse1_cachect
4026 * @opxcpttype 1
4027 * @optest op1=1 op2=2 -> op1=2
4028 * @optest op1=0 op2=-42 -> op1=-42
4029 */
4030FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4031{
4032 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4034 if (IEM_IS_MODRM_MEM_MODE(bRm))
4035 {
4036 /*
4037 * Memory, register.
4038 */
4039 IEM_MC_BEGIN(0, 2, 0, 0);
4040 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4042
4043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4045 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4046 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4047
4048 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4049 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4050
4051 IEM_MC_ADVANCE_RIP_AND_FINISH();
4052 IEM_MC_END();
4053 }
4054 /* The register, register encoding is invalid. */
4055 else
4056 IEMOP_RAISE_INVALID_OPCODE_RET();
4057}
4058
4059/**
4060 * @opcode 0x2b
4061 * @opcodesub !11 mr/reg
4062 * @oppfx 0x66
4063 * @opcpuid sse2
4064 * @opgroup og_sse2_cachect
4065 * @opxcpttype 1
4066 * @optest op1=1 op2=2 -> op1=2
4067 * @optest op1=0 op2=-42 -> op1=-42
4068 */
4069FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4070{
4071 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4073 if (IEM_IS_MODRM_MEM_MODE(bRm))
4074 {
4075 /*
4076 * memory, register.
4077 */
4078 IEM_MC_BEGIN(0, 2, 0, 0);
4079 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4081
4082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4084 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4086
4087 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4088 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4089
4090 IEM_MC_ADVANCE_RIP_AND_FINISH();
4091 IEM_MC_END();
4092 }
4093 /* The register, register encoding is invalid. */
4094 else
4095 IEMOP_RAISE_INVALID_OPCODE_RET();
4096}
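
/*
 * Note: the non-temporal hint carried by MOVNTPS/MOVNTPD has no
 * architecturally visible effect, so the workers above may legitimately
 * implement it as an ordinary aligned 128-bit store.  On real hardware the
 * hint maps to the streaming-store intrinsics, e.g. (illustration only):
 *
 *      #include <emmintrin.h>
 *      void StoreNT(double *pDst, __m128d uVal) // pDst must be 16-byte aligned
 *      {
 *          _mm_stream_pd(pDst, uVal); // bypasses the cache hierarchy
 *      }
 */
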
4097/* Opcode 0xf3 0x0f 0x2b - invalid */
4098/* Opcode 0xf2 0x0f 0x2b - invalid */
4099
4100
4101/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4102FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4103{
4104 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4106 if (IEM_IS_MODRM_REG_MODE(bRm))
4107 {
4108 /*
4109 * Register, register.
4110 */
4111 IEM_MC_BEGIN(3, 1, 0, 0);
4112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4113 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4114 IEM_MC_LOCAL(uint64_t, u64Dst);
4115 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4116 IEM_MC_ARG(uint64_t, u64Src, 2);
4117 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4118 IEM_MC_PREPARE_FPU_USAGE();
4119 IEM_MC_FPU_TO_MMX_MODE();
4120
4121 IEM_MC_REF_MXCSR(pfMxcsr);
4122 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4123
4124 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4125 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4126 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4127 } IEM_MC_ELSE() {
4128 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4129 } IEM_MC_ENDIF();
4130
4131 IEM_MC_ADVANCE_RIP_AND_FINISH();
4132 IEM_MC_END();
4133 }
4134 else
4135 {
4136 /*
4137 * Register, memory.
4138 */
4139 IEM_MC_BEGIN(3, 2, 0, 0);
4140 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4141 IEM_MC_LOCAL(uint64_t, u64Dst);
4142 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4143 IEM_MC_ARG(uint64_t, u64Src, 2);
4144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4145
4146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4148 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4149 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4150
4151 IEM_MC_PREPARE_FPU_USAGE();
4152 IEM_MC_FPU_TO_MMX_MODE();
4153 IEM_MC_REF_MXCSR(pfMxcsr);
4154
4155 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4156 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4157 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4158 } IEM_MC_ELSE() {
4159 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4160 } IEM_MC_ENDIF();
4161
4162 IEM_MC_ADVANCE_RIP_AND_FINISH();
4163 IEM_MC_END();
4164 }
4165}
4166
4167
4168/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4169FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4170{
4171 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4173 if (IEM_IS_MODRM_REG_MODE(bRm))
4174 {
4175 /*
4176 * Register, register.
4177 */
4178 IEM_MC_BEGIN(3, 1, 0, 0);
4179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4180 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4181 IEM_MC_LOCAL(uint64_t, u64Dst);
4182 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4183 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4184 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4185 IEM_MC_PREPARE_FPU_USAGE();
4186 IEM_MC_FPU_TO_MMX_MODE();
4187
4188 IEM_MC_REF_MXCSR(pfMxcsr);
4189 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4190
4191 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4192 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4193 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4194 } IEM_MC_ELSE() {
4195 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4196 } IEM_MC_ENDIF();
4197
4198 IEM_MC_ADVANCE_RIP_AND_FINISH();
4199 IEM_MC_END();
4200 }
4201 else
4202 {
4203 /*
4204 * Register, memory.
4205 */
4206 IEM_MC_BEGIN(3, 3, 0, 0);
4207 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4208 IEM_MC_LOCAL(uint64_t, u64Dst);
4209 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4210 IEM_MC_LOCAL(X86XMMREG, uSrc);
4211 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4213
4214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4216 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4217 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4218
4219 IEM_MC_PREPARE_FPU_USAGE();
4220 IEM_MC_FPU_TO_MMX_MODE();
4221
4222 IEM_MC_REF_MXCSR(pfMxcsr);
4223
4224 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4225 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4226 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4227 } IEM_MC_ELSE() {
4228 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4229 } IEM_MC_ENDIF();
4230
4231 IEM_MC_ADVANCE_RIP_AND_FINISH();
4232 IEM_MC_END();
4233 }
4234}
4235
4236
4237/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4238FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4239{
4240 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4241
4242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4243 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4244 {
4245 if (IEM_IS_MODRM_REG_MODE(bRm))
4246 {
4247 /* greg64, XMM */
4248 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4249 IEM_MC_LOCAL(uint32_t, fMxcsr);
4250 IEM_MC_LOCAL(int64_t, i64Dst);
4251 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4252 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4253 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4254
4255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4256 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4257 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4258
4259 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4260 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4261 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4262 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4263 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4264 } IEM_MC_ELSE() {
4265 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4266 } IEM_MC_ENDIF();
4267
4268 IEM_MC_ADVANCE_RIP_AND_FINISH();
4269 IEM_MC_END();
4270 }
4271 else
4272 {
4273 /* greg64, [mem64] */
4274 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4276 IEM_MC_LOCAL(uint32_t, fMxcsr);
4277 IEM_MC_LOCAL(int64_t, i64Dst);
4278 IEM_MC_LOCAL(uint32_t, u32Src);
4279 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4280 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4281 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4282
4283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4285 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4286 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4287
4288 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4289 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4290 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4291 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4292 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4293 } IEM_MC_ELSE() {
4294 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4295 } IEM_MC_ENDIF();
4296
4297 IEM_MC_ADVANCE_RIP_AND_FINISH();
4298 IEM_MC_END();
4299 }
4300 }
4301 else
4302 {
4303 if (IEM_IS_MODRM_REG_MODE(bRm))
4304 {
4305 /* greg32, XMM */
4306 IEM_MC_BEGIN(3, 2, 0, 0);
4307 IEM_MC_LOCAL(uint32_t, fMxcsr);
4308 IEM_MC_LOCAL(int32_t, i32Dst);
4309 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4310 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4311 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4312
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4314 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4315 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4316
4317 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4318 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4319 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4320 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4321 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4322 } IEM_MC_ELSE() {
4323 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4324 } IEM_MC_ENDIF();
4325
4326 IEM_MC_ADVANCE_RIP_AND_FINISH();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 /* greg32, [mem32] */
4332 IEM_MC_BEGIN(3, 4, 0, 0);
4333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4334 IEM_MC_LOCAL(uint32_t, fMxcsr);
4335 IEM_MC_LOCAL(int32_t, i32Dst);
4336 IEM_MC_LOCAL(uint32_t, u32Src);
4337 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4338 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4339 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4340
4341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4343 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4344 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4345
4346 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4347 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4348 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4349 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4350 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4351 } IEM_MC_ELSE() {
4352 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4353 } IEM_MC_ENDIF();
4354
4355 IEM_MC_ADVANCE_RIP_AND_FINISH();
4356 IEM_MC_END();
4357 }
4358 }
4359}
4360
4361
4362/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4363FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4364{
4365 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4366
4367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4368 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4369 {
4370 if (IEM_IS_MODRM_REG_MODE(bRm))
4371 {
4372 /* greg64, XMM */
4373 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4374 IEM_MC_LOCAL(uint32_t, fMxcsr);
4375 IEM_MC_LOCAL(int64_t, i64Dst);
4376 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4377 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4378 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4379
4380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4381 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4382 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4383
4384 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4385 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4386 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4387 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4388 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4389 } IEM_MC_ELSE() {
4390 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4391 } IEM_MC_ENDIF();
4392
4393 IEM_MC_ADVANCE_RIP_AND_FINISH();
4394 IEM_MC_END();
4395 }
4396 else
4397 {
4398 /* greg64, [mem64] */
4399 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4401 IEM_MC_LOCAL(uint32_t, fMxcsr);
4402 IEM_MC_LOCAL(int64_t, i64Dst);
4403 IEM_MC_LOCAL(uint64_t, u64Src);
4404 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4405 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4406 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4407
4408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4410 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4411 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4412
4413 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4414 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4415 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4416 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4417 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4418 } IEM_MC_ELSE() {
4419 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4420 } IEM_MC_ENDIF();
4421
4422 IEM_MC_ADVANCE_RIP_AND_FINISH();
4423 IEM_MC_END();
4424 }
4425 }
4426 else
4427 {
4428 if (IEM_IS_MODRM_REG_MODE(bRm))
4429 {
4430 /* greg32, XMM */
4431 IEM_MC_BEGIN(3, 2, 0, 0);
4432 IEM_MC_LOCAL(uint32_t, fMxcsr);
4433 IEM_MC_LOCAL(int32_t, i32Dst);
4434 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4435 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4436 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4437
4438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4439 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4440 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4441
4442 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4443 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4444 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4445 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4446 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4447 } IEM_MC_ELSE() {
4448 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4449 } IEM_MC_ENDIF();
4450
4451 IEM_MC_ADVANCE_RIP_AND_FINISH();
4452 IEM_MC_END();
4453 }
4454 else
4455 {
4456 /* greg32, [mem64] */
4457 IEM_MC_BEGIN(3, 4, 0, 0);
4458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4459 IEM_MC_LOCAL(uint32_t, fMxcsr);
4460 IEM_MC_LOCAL(int32_t, i32Dst);
4461 IEM_MC_LOCAL(uint64_t, u64Src);
4462 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4463 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4464 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4465
4466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4468 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4469 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4470
4471 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4472 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4473 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4474 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4475 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4476 } IEM_MC_ELSE() {
4477 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4478 } IEM_MC_ENDIF();
4479
4480 IEM_MC_ADVANCE_RIP_AND_FINISH();
4481 IEM_MC_END();
4482 }
4483 }
4484}
4485
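/*
 * Note: the CVTT* encodings (0x2c) always truncate toward zero and ignore
 * MXCSR.RC, while the CVT* encodings (0x2d) below round according to
 * MXCSR.RC.  NaN and out-of-range inputs yield the integer indefinite value
 * (0x80000000 / 0x8000000000000000) and flag #I.  The truncating behaviour
 * matches a plain C cast:
 *
 *      double  d = -1.7;
 *      int32_t i = (int32_t)d;  // truncates toward zero: i == -1
 */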
4486
4487/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4488FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4489{
4490 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4492 if (IEM_IS_MODRM_REG_MODE(bRm))
4493 {
4494 /*
4495 * Register, register.
4496 */
4497 IEM_MC_BEGIN(3, 1, 0, 0);
4498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4499 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4500 IEM_MC_LOCAL(uint64_t, u64Dst);
4501 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4502 IEM_MC_ARG(uint64_t, u64Src, 2);
4503
4504 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4505 IEM_MC_PREPARE_FPU_USAGE();
4506 IEM_MC_FPU_TO_MMX_MODE();
4507
4508 IEM_MC_REF_MXCSR(pfMxcsr);
4509 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4510
4511 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4512 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4513 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4514 } IEM_MC_ELSE() {
4515 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4516 } IEM_MC_ENDIF();
4517
4518 IEM_MC_ADVANCE_RIP_AND_FINISH();
4519 IEM_MC_END();
4520 }
4521 else
4522 {
4523 /*
4524 * Register, memory.
4525 */
4526 IEM_MC_BEGIN(3, 2, 0, 0);
4527 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4528 IEM_MC_LOCAL(uint64_t, u64Dst);
4529 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4530 IEM_MC_ARG(uint64_t, u64Src, 2);
4531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4532
4533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4535 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4536 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4537
4538 IEM_MC_PREPARE_FPU_USAGE();
4539 IEM_MC_FPU_TO_MMX_MODE();
4540 IEM_MC_REF_MXCSR(pfMxcsr);
4541
4542 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4543 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4544 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4545 } IEM_MC_ELSE() {
4546 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4547 } IEM_MC_ENDIF();
4548
4549 IEM_MC_ADVANCE_RIP_AND_FINISH();
4550 IEM_MC_END();
4551 }
4552}
4553
4554
4555/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4556FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4557{
4558 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4560 if (IEM_IS_MODRM_REG_MODE(bRm))
4561 {
4562 /*
4563 * Register, register.
4564 */
4565 IEM_MC_BEGIN(3, 1, 0, 0);
4566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4567 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4568 IEM_MC_LOCAL(uint64_t, u64Dst);
4569 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4570 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4571
4572 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4573 IEM_MC_PREPARE_FPU_USAGE();
4574 IEM_MC_FPU_TO_MMX_MODE();
4575
4576 IEM_MC_REF_MXCSR(pfMxcsr);
4577 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4578
4579 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4580 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4581 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4582 } IEM_MC_ELSE() {
4583 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4584 } IEM_MC_ENDIF();
4585
4586 IEM_MC_ADVANCE_RIP_AND_FINISH();
4587 IEM_MC_END();
4588 }
4589 else
4590 {
4591 /*
4592 * Register, memory.
4593 */
4594 IEM_MC_BEGIN(3, 3, 0, 0);
4595 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4596 IEM_MC_LOCAL(uint64_t, u64Dst);
4597 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4598 IEM_MC_LOCAL(X86XMMREG, uSrc);
4599 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4601
4602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4604 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4605 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4606
4607 IEM_MC_PREPARE_FPU_USAGE();
4608 IEM_MC_FPU_TO_MMX_MODE();
4609
4610 IEM_MC_REF_MXCSR(pfMxcsr);
4611
4612 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4613 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4614 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4615 } IEM_MC_ELSE() {
4616 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4617 } IEM_MC_ENDIF();
4618
4619 IEM_MC_ADVANCE_RIP_AND_FINISH();
4620 IEM_MC_END();
4621 }
4622}
4623
4624
4625/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4626FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4627{
4628 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4629
4630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4631 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4632 {
4633 if (IEM_IS_MODRM_REG_MODE(bRm))
4634 {
4635 /* greg64, XMM */
4636 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4637 IEM_MC_LOCAL(uint32_t, fMxcsr);
4638 IEM_MC_LOCAL(int64_t, i64Dst);
4639 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4640 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4641 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4642
4643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4644 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4645 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4646
4647 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4648 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4649 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4650 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4651 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4652 } IEM_MC_ELSE() {
4653 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4654 } IEM_MC_ENDIF();
4655
4656 IEM_MC_ADVANCE_RIP_AND_FINISH();
4657 IEM_MC_END();
4658 }
4659 else
4660 {
4661 /* greg64, [mem64] */
4662 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4664 IEM_MC_LOCAL(uint32_t, fMxcsr);
4665 IEM_MC_LOCAL(int64_t, i64Dst);
4666 IEM_MC_LOCAL(uint32_t, u32Src);
4667 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4668 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4669 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4670
4671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4673 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4674 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4675
4676 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4677 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4678 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4679 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4680 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4681 } IEM_MC_ELSE() {
4682 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4683 } IEM_MC_ENDIF();
4684
4685 IEM_MC_ADVANCE_RIP_AND_FINISH();
4686 IEM_MC_END();
4687 }
4688 }
4689 else
4690 {
4691 if (IEM_IS_MODRM_REG_MODE(bRm))
4692 {
4693 /* greg32, XMM */
4694 IEM_MC_BEGIN(3, 2, 0, 0);
4695 IEM_MC_LOCAL(uint32_t, fMxcsr);
4696 IEM_MC_LOCAL(int32_t, i32Dst);
4697 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4698 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4699 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4700
4701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4702 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4703 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4704
4705 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4706 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4707 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4708 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4709 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4710 } IEM_MC_ELSE() {
4711 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4712 } IEM_MC_ENDIF();
4713
4714 IEM_MC_ADVANCE_RIP_AND_FINISH();
4715 IEM_MC_END();
4716 }
4717 else
4718 {
4719 /* greg32, [mem32] */
4720 IEM_MC_BEGIN(3, 4, 0, 0);
4721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4722 IEM_MC_LOCAL(uint32_t, fMxcsr);
4723 IEM_MC_LOCAL(int32_t, i32Dst);
4724 IEM_MC_LOCAL(uint32_t, u32Src);
4725 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4726 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4727 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4728
4729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4731 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4732 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4733
4734 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4735 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4736 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4737 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4738 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4739 } IEM_MC_ELSE() {
4740 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4741 } IEM_MC_ENDIF();
4742
4743 IEM_MC_ADVANCE_RIP_AND_FINISH();
4744 IEM_MC_END();
4745 }
4746 }
4747}
4748
4749
4750/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4751FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4752{
4753 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4754
4755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4756 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4757 {
4758 if (IEM_IS_MODRM_REG_MODE(bRm))
4759 {
4760 /* greg64, XMM */
4761 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4762 IEM_MC_LOCAL(uint32_t, fMxcsr);
4763 IEM_MC_LOCAL(int64_t, i64Dst);
4764 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4765 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4766 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4767
4768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4769 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4770 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4771
4772 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4773 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4774 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4775 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4776 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4777 } IEM_MC_ELSE() {
4778 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4779 } IEM_MC_ENDIF();
4780
4781 IEM_MC_ADVANCE_RIP_AND_FINISH();
4782 IEM_MC_END();
4783 }
4784 else
4785 {
4786 /* greg64, [mem64] */
4787 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4789 IEM_MC_LOCAL(uint32_t, fMxcsr);
4790 IEM_MC_LOCAL(int64_t, i64Dst);
4791 IEM_MC_LOCAL(uint64_t, u64Src);
4792 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4793 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4794 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4795
4796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4798 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4799 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4800
4801 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4802 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4803 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4804 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4805 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4806 } IEM_MC_ELSE() {
4807 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4808 } IEM_MC_ENDIF();
4809
4810 IEM_MC_ADVANCE_RIP_AND_FINISH();
4811 IEM_MC_END();
4812 }
4813 }
4814 else
4815 {
4816 if (IEM_IS_MODRM_REG_MODE(bRm))
4817 {
4818 /* greg32, XMM */
4819 IEM_MC_BEGIN(3, 2, 0, 0);
4820 IEM_MC_LOCAL(uint32_t, fMxcsr);
4821 IEM_MC_LOCAL(int32_t, i32Dst);
4822 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4823 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4824 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4825
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4827 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4828 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4829
4830 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4831 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4832 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4833 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4834 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4835 } IEM_MC_ELSE() {
4836 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4837 } IEM_MC_ENDIF();
4838
4839 IEM_MC_ADVANCE_RIP_AND_FINISH();
4840 IEM_MC_END();
4841 }
4842 else
4843 {
4844 /* greg32, [mem64] */
4845 IEM_MC_BEGIN(3, 4, 0, 0);
4846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4847 IEM_MC_LOCAL(uint32_t, fMxcsr);
4848 IEM_MC_LOCAL(int32_t, i32Dst);
4849 IEM_MC_LOCAL(uint64_t, u64Src);
4850 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4851 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4852 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4853
4854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4856 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4857 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4858
4859 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4860 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4861 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4862 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4863 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4864 } IEM_MC_ELSE() {
4865 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4866 } IEM_MC_ENDIF();
4867
4868 IEM_MC_ADVANCE_RIP_AND_FINISH();
4869 IEM_MC_END();
4870 }
4871 }
4872}
4873
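/*
 * Note: unlike the truncating 0x2c forms, CVTPS2PI/CVTPD2PI/CVTSS2SI/
 * CVTSD2SI honour MXCSR.RC.  Under the default round-to-nearest-even mode
 * halfway cases go to the even integer, which lrint() reproduces on a host
 * running with its default rounding mode (illustration only):
 *
 *      #include <math.h>
 *      long l = lrint(2.5);  // 2 under nearest-even
 *      long m = lrint(3.5);  // 4 under nearest-even
 */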
4874
4875/**
4876 * @opcode 0x2e
4877 * @oppfx none
4878 * @opflmodify cf,pf,af,zf,sf,of
4879 * @opflclear af,sf,of
4880 */
4881FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4882{
4883 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4885 if (IEM_IS_MODRM_REG_MODE(bRm))
4886 {
4887 /*
4888 * Register, register.
4889 */
4890 IEM_MC_BEGIN(4, 1, 0, 0);
4891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4892 IEM_MC_LOCAL(uint32_t, fEFlags);
4893 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4894 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4895 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4896 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4897 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4898 IEM_MC_PREPARE_SSE_USAGE();
4899 IEM_MC_FETCH_EFLAGS(fEFlags);
4900 IEM_MC_REF_MXCSR(pfMxcsr);
4901 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4902 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4903 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4904 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4905 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4906 } IEM_MC_ELSE() {
4907 IEM_MC_COMMIT_EFLAGS(fEFlags);
4908 } IEM_MC_ENDIF();
4909
4910 IEM_MC_ADVANCE_RIP_AND_FINISH();
4911 IEM_MC_END();
4912 }
4913 else
4914 {
4915 /*
4916 * Register, memory.
4917 */
4918 IEM_MC_BEGIN(4, 3, 0, 0);
4919 IEM_MC_LOCAL(uint32_t, fEFlags);
4920 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4921 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4922 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4923 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4924 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4926
4927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4929 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4930 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4931
4932 IEM_MC_PREPARE_SSE_USAGE();
4933 IEM_MC_FETCH_EFLAGS(fEFlags);
4934 IEM_MC_REF_MXCSR(pfMxcsr);
4935 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4936 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4937 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4938 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4939 } IEM_MC_ELSE() {
4940 IEM_MC_COMMIT_EFLAGS(fEFlags);
4941 } IEM_MC_ENDIF();
4942
4943 IEM_MC_ADVANCE_RIP_AND_FINISH();
4944 IEM_MC_END();
4945 }
4946}
4947
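/*
 * Note: UCOMISS (and UCOMISD below) set ZF/PF/CF from the compare result
 * and clear OF/SF/AF, matching the @opflmodify/@opflclear annotations:
 *
 *      unordered (NaN operand):  ZF=1 PF=1 CF=1
 *      greater than:             ZF=0 PF=0 CF=0
 *      less than:                ZF=0 PF=0 CF=1
 *      equal:                    ZF=1 PF=0 CF=0
 *
 * Sketch of that mapping in C (X86_EFL_* from iprt/x86.h; not the real
 * worker):
 *
 *      uint32_t fZfPfCf;
 *      if (isnan(r32Src1) || isnan(r32Src2)) fZfPfCf = X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
 *      else if (r32Src1 > r32Src2)           fZfPfCf = 0;
 *      else if (r32Src1 < r32Src2)           fZfPfCf = X86_EFL_CF;
 *      else                                  fZfPfCf = X86_EFL_ZF;
 */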
4948
4949/**
4950 * @opcode 0x2e
4951 * @oppfx 0x66
4952 * @opflmodify cf,pf,af,zf,sf,of
4953 * @opflclear af,sf,of
4954 */
4955FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4956{
4957 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4959 if (IEM_IS_MODRM_REG_MODE(bRm))
4960 {
4961 /*
4962 * Register, register.
4963 */
4964 IEM_MC_BEGIN(4, 1, 0, 0);
4965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4966 IEM_MC_LOCAL(uint32_t, fEFlags);
4967 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4968 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4969 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4970 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4971 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4972 IEM_MC_PREPARE_SSE_USAGE();
4973 IEM_MC_FETCH_EFLAGS(fEFlags);
4974 IEM_MC_REF_MXCSR(pfMxcsr);
4975 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4976 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4977 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4978 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4979 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4980 } IEM_MC_ELSE() {
4981 IEM_MC_COMMIT_EFLAGS(fEFlags);
4982 } IEM_MC_ENDIF();
4983
4984 IEM_MC_ADVANCE_RIP_AND_FINISH();
4985 IEM_MC_END();
4986 }
4987 else
4988 {
4989 /*
4990 * Register, memory.
4991 */
4992 IEM_MC_BEGIN(4, 3, 0, 0);
4993 IEM_MC_LOCAL(uint32_t, fEFlags);
4994 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4995 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4996 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4997 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4998 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5000
5001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5004 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5005
5006 IEM_MC_PREPARE_SSE_USAGE();
5007 IEM_MC_FETCH_EFLAGS(fEFlags);
5008 IEM_MC_REF_MXCSR(pfMxcsr);
5009 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5010 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5011 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5012 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5013 } IEM_MC_ELSE() {
5014 IEM_MC_COMMIT_EFLAGS(fEFlags);
5015 } IEM_MC_ENDIF();
5016
5017 IEM_MC_ADVANCE_RIP_AND_FINISH();
5018 IEM_MC_END();
5019 }
5020}
5021
5022
5023/* Opcode 0xf3 0x0f 0x2e - invalid */
5024/* Opcode 0xf2 0x0f 0x2e - invalid */
5025
5026
5027/**
5028 * @opcode 0x2f
5029 * @oppfx none
5030 * @opflmodify cf,pf,af,zf,sf,of
5031 * @opflclear af,sf,of
5032 */
5033FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5034{
5035 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5037 if (IEM_IS_MODRM_REG_MODE(bRm))
5038 {
5039 /*
5040 * Register, register.
5041 */
5042 IEM_MC_BEGIN(4, 1, 0, 0);
5043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5044 IEM_MC_LOCAL(uint32_t, fEFlags);
5045 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5046 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5047 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5048 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5049 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5050 IEM_MC_PREPARE_SSE_USAGE();
5051 IEM_MC_FETCH_EFLAGS(fEFlags);
5052 IEM_MC_REF_MXCSR(pfMxcsr);
5053 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5054 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5055 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5056 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5057 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5058 } IEM_MC_ELSE() {
5059 IEM_MC_COMMIT_EFLAGS(fEFlags);
5060 } IEM_MC_ENDIF();
5061
5062 IEM_MC_ADVANCE_RIP_AND_FINISH();
5063 IEM_MC_END();
5064 }
5065 else
5066 {
5067 /*
5068 * Register, memory.
5069 */
5070 IEM_MC_BEGIN(4, 3, 0, 0);
5071 IEM_MC_LOCAL(uint32_t, fEFlags);
5072 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5073 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5074 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5075 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5076 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5078
5079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5081 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5082 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5083
5084 IEM_MC_PREPARE_SSE_USAGE();
5085 IEM_MC_FETCH_EFLAGS(fEFlags);
5086 IEM_MC_REF_MXCSR(pfMxcsr);
5087 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5088 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5089 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5090 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5091 } IEM_MC_ELSE() {
5092 IEM_MC_COMMIT_EFLAGS(fEFlags);
5093 } IEM_MC_ENDIF();
5094
5095 IEM_MC_ADVANCE_RIP_AND_FINISH();
5096 IEM_MC_END();
5097 }
5098}
5099
5100
5101/**
5102 * @opcode 0x2f
5103 * @oppfx 0x66
5104 * @opflmodify cf,pf,af,zf,sf,of
5105 * @opflclear af,sf,of
5106 */
5107FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5108{
5109 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5111 if (IEM_IS_MODRM_REG_MODE(bRm))
5112 {
5113 /*
5114 * Register, register.
5115 */
5116 IEM_MC_BEGIN(4, 1, 0, 0);
5117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5118 IEM_MC_LOCAL(uint32_t, fEFlags);
5119 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5120 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5121 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5122 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5124 IEM_MC_PREPARE_SSE_USAGE();
5125 IEM_MC_FETCH_EFLAGS(fEFlags);
5126 IEM_MC_REF_MXCSR(pfMxcsr);
5127 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5128 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5129 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5130 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5131 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5132 } IEM_MC_ELSE() {
5133 IEM_MC_COMMIT_EFLAGS(fEFlags);
5134 } IEM_MC_ENDIF();
5135
5136 IEM_MC_ADVANCE_RIP_AND_FINISH();
5137 IEM_MC_END();
5138 }
5139 else
5140 {
5141 /*
5142 * Register, memory.
5143 */
5144 IEM_MC_BEGIN(4, 3, 0, 0);
5145 IEM_MC_LOCAL(uint32_t, fEFlags);
5146 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5147 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5148 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5149 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5150 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5152
5153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5155 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5156 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5157
5158 IEM_MC_PREPARE_SSE_USAGE();
5159 IEM_MC_FETCH_EFLAGS(fEFlags);
5160 IEM_MC_REF_MXCSR(pfMxcsr);
5161 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5162 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5163 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5164 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5165 } IEM_MC_ELSE() {
5166 IEM_MC_COMMIT_EFLAGS(fEFlags);
5167 } IEM_MC_ENDIF();
5168
5169 IEM_MC_ADVANCE_RIP_AND_FINISH();
5170 IEM_MC_END();
5171 }
5172}
5173
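/*
 * For reference, a plain-C sketch of the EFLAGS mapping that COMISS/COMISD
 * (and their unordered UCOMIxx siblings) perform. The real work is done by
 * iemAImpl_comiss_u128 / iemAImpl_comisd_u128; the helper name below is made
 * up purely for illustration, hence it is compiled out.
 */
#if 0
static uint32_t iemSketchComiEFlags(double rdSrc1, double rdSrc2, uint32_t fEFlags)
{
    /* OF, AF and SF are always cleared; ZF, PF and CF encode the result. */
    fEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (rdSrc1 != rdSrc1 || rdSrc2 != rdSrc2)   /* unordered: a NaN operand */
        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    else if (rdSrc1 < rdSrc2)                   /* source1 less than source2 */
        fEFlags |= X86_EFL_CF;
    else if (rdSrc1 == rdSrc2)                  /* equal */
        fEFlags |= X86_EFL_ZF;
    /* else: greater than, ZF=PF=CF=0 */
    return fEFlags;
}
#endif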
5174
5175/* Opcode 0xf3 0x0f 0x2f - invalid */
5176/* Opcode 0xf2 0x0f 0x2f - invalid */
5177
5178/** Opcode 0x0f 0x30. */
5179FNIEMOP_DEF(iemOp_wrmsr)
5180{
5181 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5183 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
5184}
5185
5186
5187/** Opcode 0x0f 0x31. */
5188FNIEMOP_DEF(iemOp_rdtsc)
5189{
5190 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5192 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5193 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5194 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5195 iemCImpl_rdtsc);
5196}
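
/* Note: the RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xXX) masks passed
   as the second IEM_MC_DEFER_TO_CIMPL_0_RET argument here and below name the
   guest registers these instructions write behind the recompiler's back
   (EDX:EAX for RDTSC, RDMSR and RDPMC), so the native recompiler knows which
   guest register shadows to flush. */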
5197
5198
5199/** Opcode 0x0f 0x32. */
5200FNIEMOP_DEF(iemOp_rdmsr)
5201{
5202 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5204 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5205 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5206 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5207 iemCImpl_rdmsr);
5208}
5209
5210
5211/** Opcode 0x0f 0x33. */
5212FNIEMOP_DEF(iemOp_rdpmc)
5213{
5214 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5216 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5217 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5218 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5219 iemCImpl_rdpmc);
5220}
5221
5222
5223/** Opcode 0x0f 0x34. */
5224FNIEMOP_DEF(iemOp_sysenter)
5225{
5226 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5228 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5229 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5230 iemCImpl_sysenter);
5231}
5232
5233/** Opcode 0x0f 0x35. */
5234FNIEMOP_DEF(iemOp_sysexit)
5235{
5236 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5238 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5239 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5240 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5241}
5242
5243/** Opcode 0x0f 0x37. */
5244FNIEMOP_STUB(iemOp_getsec);
5245
5246
5247/** Opcode 0x0f 0x38. */
5248FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5249{
5250#ifdef IEM_WITH_THREE_0F_38
5251 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5252 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5253#else
5254 IEMOP_BITCH_ABOUT_STUB();
5255 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5256#endif
5257}
5258
5259
5260/** Opcode 0x0f 0x3a. */
5261FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5262{
5263#ifdef IEM_WITH_THREE_0F_3A
5264 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5265 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5266#else
5267 IEMOP_BITCH_ABOUT_STUB();
5268 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5269#endif
5270}
5271
5272
5273/**
5274 * Implements a conditional move.
5275 *
5276 * Wish there was an obvious way to do this where we could share and reduce
5277 * code bloat.
5278 *
5279 * @param a_Cnd The conditional "microcode" operation.
5280 */
5281#define CMOV_X(a_Cnd) \
5282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5283 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5284 { \
5285 switch (pVCpu->iem.s.enmEffOpSize) \
5286 { \
5287 case IEMMODE_16BIT: \
5288 IEM_MC_BEGIN(0, 1, 0, 0); \
5289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5290 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5291 a_Cnd { \
5292 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5293 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5294 } IEM_MC_ENDIF(); \
5295 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5296 IEM_MC_END(); \
5297 break; \
5298 \
5299 case IEMMODE_32BIT: \
5300 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); \
5301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5302 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5303 a_Cnd { \
5304 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5305 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5306 } IEM_MC_ELSE() { \
5307 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5308 } IEM_MC_ENDIF(); \
5309 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5310 IEM_MC_END(); \
5311 break; \
5312 \
5313 case IEMMODE_64BIT: \
5314 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0); \
5315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5316 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5317 a_Cnd { \
5318 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5319 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5320 } IEM_MC_ENDIF(); \
5321 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5322 IEM_MC_END(); \
5323 break; \
5324 \
5325 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5326 } \
5327 } \
5328 else \
5329 { \
5330 switch (pVCpu->iem.s.enmEffOpSize) \
5331 { \
5332 case IEMMODE_16BIT: \
5333 IEM_MC_BEGIN(0, 2, 0, 0); \
5334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5335 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5338 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5339 a_Cnd { \
5340 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5341 } IEM_MC_ENDIF(); \
5342 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5343 IEM_MC_END(); \
5344 break; \
5345 \
5346 case IEMMODE_32BIT: \
5347 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0); \
5348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5349 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5352 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5353 a_Cnd { \
5354 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5355 } IEM_MC_ELSE() { \
5356 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5357 } IEM_MC_ENDIF(); \
5358 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5359 IEM_MC_END(); \
5360 break; \
5361 \
5362 case IEMMODE_64BIT: \
5363 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0); \
5364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5365 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5368 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5369 a_Cnd { \
5370 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5371 } IEM_MC_ENDIF(); \
5372 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5373 IEM_MC_END(); \
5374 break; \
5375 \
5376 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5377 } \
5378 } do {} while (0)
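
/*
 * For reference, roughly what CMOV_X boils down to for the 32-bit
 * register-to-register case once the microcode above is expanded; a plain-C
 * sketch with a made-up name, compiled out. Note the architectural quirk the
 * IEM_MC_ELSE() branches encode: in 64-bit mode a 32-bit CMOV clears the
 * destination's high dword even when the condition is false.
 */
#if 0
static void iemSketchCmov32RegReg(uint64_t *pauGRegs, uint32_t fEFlags, uint8_t iRegDst, uint8_t iRegSrc)
{
    if (fEFlags & X86_EFL_ZF)                               /* e.g. CMOVE/CMOVZ */
        pauGRegs[iRegDst] = (uint32_t)pauGRegs[iRegSrc];    /* 32-bit write zero-extends */
    else
        pauGRegs[iRegDst] = (uint32_t)pauGRegs[iRegDst];    /* bits 63:32 still cleared */
}
#endif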
5379
5380
5381
5382/**
5383 * @opcode 0x40
5384 * @opfltest of
5385 */
5386FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5387{
5388 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5389 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5390}
5391
5392
5393/**
5394 * @opcode 0x41
5395 * @opfltest of
5396 */
5397FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5398{
5399 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5400 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5401}
5402
5403
5404/**
5405 * @opcode 0x42
5406 * @opfltest cf
5407 */
5408FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5409{
5410 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5411 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5412}
5413
5414
5415/**
5416 * @opcode 0x43
5417 * @opfltest cf
5418 */
5419FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5420{
5421 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5422 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5423}
5424
5425
5426/**
5427 * @opcode 0x44
5428 * @opfltest zf
5429 */
5430FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5431{
5432 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5433 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5434}
5435
5436
5437/**
5438 * @opcode 0x45
5439 * @opfltest zf
5440 */
5441FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5442{
5443 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5444 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5445}
5446
5447
5448/**
5449 * @opcode 0x46
5450 * @opfltest cf,zf
5451 */
5452FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5453{
5454 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5455 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5456}
5457
5458
5459/**
5460 * @opcode 0x47
5461 * @opfltest cf,zf
5462 */
5463FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5464{
5465 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5466 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5467}
5468
5469
5470/**
5471 * @opcode 0x48
5472 * @opfltest sf
5473 */
5474FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5475{
5476 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5477 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5478}
5479
5480
5481/**
5482 * @opcode 0x49
5483 * @opfltest sf
5484 */
5485FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5486{
5487 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5488 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5489}
5490
5491
5492/**
5493 * @opcode 0x4a
5494 * @opfltest pf
5495 */
5496FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5497{
5498 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5499 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5500}
5501
5502
5503/**
5504 * @opcode 0x4b
5505 * @opfltest pf
5506 */
5507FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5508{
5509 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5510 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5511}
5512
5513
5514/**
5515 * @opcode 0x4c
5516 * @opfltest sf,of
5517 */
5518FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5519{
5520 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5521 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5522}
5523
5524
5525/**
5526 * @opcode 0x4d
5527 * @opfltest sf,of
5528 */
5529FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5530{
5531 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5532 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5533}
5534
5535
5536/**
5537 * @opcode 0x4e
5538 * @opfltest zf,sf,of
5539 */
5540FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5541{
5542 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5543 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5544}
5545
5546
5547/**
5548 * @opcode 0x4f
5549 * @opfltest zf,sf,of
5550 */
5551FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5552{
5553 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5554 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5555}
5556
5557#undef CMOV_X
5558
5559/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5560FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5561{
5562 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5564 if (IEM_IS_MODRM_REG_MODE(bRm))
5565 {
5566 /*
5567 * Register, register.
5568 */
5569 IEM_MC_BEGIN(2, 1, 0, 0);
5570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5571 IEM_MC_LOCAL(uint8_t, u8Dst);
5572 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5573 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5574 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5575 IEM_MC_PREPARE_SSE_USAGE();
5576 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5577 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5578 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5579 IEM_MC_ADVANCE_RIP_AND_FINISH();
5580 IEM_MC_END();
5581 }
5582 /* No memory operand. */
5583 else
5584 IEMOP_RAISE_INVALID_OPCODE_RET();
5585}
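
/*
 * For reference: MOVMSKPS simply gathers the sign bit of each of the four
 * packed singles into bits 3:0 of the destination GPR (zero-extended by the
 * 32-bit store above). Plain-C sketch of what iemAImpl_movmskps_u128
 * computes; the helper name is illustrative only, hence compiled out.
 */
#if 0
static uint8_t iemSketchMovmskpsU128(uint32_t const auSrc[4])
{
    uint8_t bResult = 0;
    for (unsigned i = 0; i < 4; i++)
        bResult |= (uint8_t)(((auSrc[i] >> 31) & 1) << i);
    return bResult;
}
#endif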
5586
5587
5588/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5589FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5590{
5591 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5593 if (IEM_IS_MODRM_REG_MODE(bRm))
5594 {
5595 /*
5596 * Register, register.
5597 */
5598 IEM_MC_BEGIN(2, 1, 0, 0);
5599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5600 IEM_MC_LOCAL(uint8_t, u8Dst);
5601 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5602 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5603 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5604 IEM_MC_PREPARE_SSE_USAGE();
5605 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5606 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5607        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5608 IEM_MC_ADVANCE_RIP_AND_FINISH();
5609 IEM_MC_END();
5610 }
5611 /* No memory operand. */
5612 else
5613 IEMOP_RAISE_INVALID_OPCODE_RET();
5614
5615}
5616
5617
5618/* Opcode 0xf3 0x0f 0x50 - invalid */
5619/* Opcode 0xf2 0x0f 0x50 - invalid */
5620
5621
5622/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5623FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5624{
5625 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5626 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5627}
5628
5629
5630/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5631FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5632{
5633 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5634 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5635}
5636
5637
5638/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5639FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5640{
5641 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5642 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5643}
5644
5645
5646/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5647FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5648{
5649 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5650 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5651}
5652
5653
5654/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5655FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5656{
5657 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5658 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5659}
5660
5661
5662/* Opcode 0x66 0x0f 0x52 - invalid */
5663
5664
5665/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5666FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5667{
5668 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5669 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5670}
5671
5672
5673/* Opcode 0xf2 0x0f 0x52 - invalid */
5674
5675
5676/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5677FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5678{
5679 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5680 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5681}
5682
5683
5684/* Opcode 0x66 0x0f 0x53 - invalid */
5685
5686
5687/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5688FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5689{
5690 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5691 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5692}
5693
5694
5695/* Opcode 0xf2 0x0f 0x53 - invalid */
5696
5697
5698/** Opcode 0x0f 0x54 - andps Vps, Wps */
5699FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5700{
5701 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5702 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5703}
5704
5705
5706/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5707FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5708{
5709 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5710 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5711}
5712
5713
5714/* Opcode 0xf3 0x0f 0x54 - invalid */
5715/* Opcode 0xf2 0x0f 0x54 - invalid */
5716
5717
5718/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5719FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5720{
5721 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5722 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5723}
5724
5725
5726/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5727FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5728{
5729 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5730 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5731}
5732
5733
5734/* Opcode 0xf3 0x0f 0x55 - invalid */
5735/* Opcode 0xf2 0x0f 0x55 - invalid */
5736
5737
5738/** Opcode 0x0f 0x56 - orps Vps, Wps */
5739FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5740{
5741 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5742 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5743}
5744
5745
5746/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5747FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5748{
5749 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5750 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5751}
5752
5753
5754/* Opcode 0xf3 0x0f 0x56 - invalid */
5755/* Opcode 0xf2 0x0f 0x56 - invalid */
5756
5757
5758/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5759FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5760{
5761 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5762 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5763}
5764
5765
5766/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5767FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5768{
5769 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5770 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5771}
5772
5773
5774/* Opcode 0xf3 0x0f 0x57 - invalid */
5775/* Opcode 0xf2 0x0f 0x57 - invalid */
5776
5777/** Opcode 0x0f 0x58 - addps Vps, Wps */
5778FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5779{
5780 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5781 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5782}
5783
5784
5785/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5786FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5787{
5788 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5789 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5790}
5791
5792
5793/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5794FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5795{
5796 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5797 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5798}
5799
5800
5801/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5802FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5803{
5804 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5805 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5806}
5807
5808
5809/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5810FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5811{
5812 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5813 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5814}
5815
5816
5817/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5818FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5819{
5820 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5821 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5822}
5823
5824
5825/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5826FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5827{
5828 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5829 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5830}
5831
5832
5833/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5834FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5835{
5836 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5837 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5838}
5839
5840
5841/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5842FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5843{
5844 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5845 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5846}
5847
5848
5849/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5850FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5851{
5852 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5853 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5854}
5855
5856
5857/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5858FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5859{
5860 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5861 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5862}
5863
5864
5865/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5866FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5867{
5868 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5869 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5870}
5871
5872
5873/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5874FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5875{
5876 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5877 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5878}
5879
5880
5881/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5882FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5883{
5884 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5885 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5886}
5887
5888
5889/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5890FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5891{
5892 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5893 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5894}
5895
5896
5897/* Opcode 0xf2 0x0f 0x5b - invalid */
5898
5899
5900/** Opcode 0x0f 0x5c - subps Vps, Wps */
5901FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5902{
5903 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5904 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5905}
5906
5907
5908/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5909FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5910{
5911 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5912 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5913}
5914
5915
5916/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5917FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5918{
5919 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5920 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5921}
5922
5923
5924/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5925FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5926{
5927 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5928 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5929}
5930
5931
5932/** Opcode 0x0f 0x5d - minps Vps, Wps */
5933FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5934{
5935 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5936 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5937}
5938
5939
5940/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5941FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5942{
5943 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5944 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5945}
5946
5947
5948/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5949FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5950{
5951 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5952 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5953}
5954
5955
5956/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5957FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5958{
5959 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5960 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5961}
5962
5963
5964/** Opcode 0x0f 0x5e - divps Vps, Wps */
5965FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5966{
5967 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5968 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5969}
5970
5971
5972/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5973FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5974{
5975 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5976 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5977}
5978
5979
5980/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5981FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5982{
5983 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5984 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5985}
5986
5987
5988/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5989FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5990{
5991 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5992 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5993}
5994
5995
5996/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5997FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5998{
5999 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6000 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
6001}
6002
6003
6004/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
6005FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
6006{
6007 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6008 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
6009}
6010
6011
6012/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
6013FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
6014{
6015 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
6016 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
6017}
6018
6019
6020/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
6021FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
6022{
6023 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
6024 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
6025}
6026
6027
6028/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
6029FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
6030{
6031 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6032 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
6033}
6034
6035
6036/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
6037FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
6038{
6039 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6040 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
6041}
6042
6043
6044/* Opcode 0xf3 0x0f 0x60 - invalid */
6045
6046
6047/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
6048FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
6049{
6050 /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
6051 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6052 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
6053}
6054
6055
6056/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
6057FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
6058{
6059 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6060 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
6061}
6062
6063
6064/* Opcode 0xf3 0x0f 0x61 - invalid */
6065
6066
6067/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
6068FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
6069{
6070 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6071 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
6072}
6073
6074
6075/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
6076FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
6077{
6078 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6079 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
6080}
6081
6082
6083/* Opcode 0xf3 0x0f 0x62 - invalid */
6084
6085
6086
6087/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
6088FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
6089{
6090 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6091 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
6092}
6093
6094
6095/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
6096FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
6097{
6098 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6099 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
6100}
6101
6102
6103/* Opcode 0xf3 0x0f 0x63 - invalid */
6104
6105
6106/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
6107FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
6108{
6109 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6110 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6111}
6112
6113
6114/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6115FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6116{
6117 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6118 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6119}
6120
6121
6122/* Opcode 0xf3 0x0f 0x64 - invalid */
6123
6124
6125/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6126FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6127{
6128 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6129 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6130}
6131
6132
6133/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6134FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6135{
6136 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6137 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6138}
6139
6140
6141/* Opcode 0xf3 0x0f 0x65 - invalid */
6142
6143
6144/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6145FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6146{
6147 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6148 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6149}
6150
6151
6152/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6153FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6154{
6155 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6156 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6157}
6158
6159
6160/* Opcode 0xf3 0x0f 0x66 - invalid */
6161
6162
6163/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6164FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6165{
6166 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6167 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6168}
6169
6170
6171/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6172FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6173{
6174 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6175 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6176}
6177
6178
6179/* Opcode 0xf3 0x0f 0x67 - invalid */
6180
6181
6182/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6183 * @note Intel and AMD both use Qd for the second parameter; however, they
6184 *       both list it as an mmX/mem64 operand and Intel describes it as being
6185 *       loaded as a qword, so it should be Qq, shouldn't it? */
6186FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6187{
6188 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6189 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6190}
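
/*
 * Re the Qd vs Qq note above: PUNPCKHBW interleaves the bytes of the *high*
 * dwords of destination and source, which is why the full source qword gets
 * fetched. Plain-C sketch of what iemAImpl_punpckhbw_u64 computes; the helper
 * name below is made up, so it is compiled out.
 */
#if 0
static void iemSketchPunpckhbwU64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uDst    = *puDst;
    uint64_t const uSrc    = *puSrc;
    uint64_t       uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);     /* even result bytes from mm1[63:32] */
        uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8); /* odd result bytes from mm2/m64[63:32] */
    }
    *puDst = uResult;
}
#endif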
6191
6192
6193/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6194FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6195{
6196 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6197 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6198}
6199
6200
6201/* Opcode 0xf3 0x0f 0x68 - invalid */
6202
6203
6204/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6205 * @note Intel and AMD both use Qd for the second parameter; however, they
6206 *       both list it as an mmX/mem64 operand and Intel describes it as being
6207 *       loaded as a qword, so it should be Qq, shouldn't it? */
6208FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6209{
6210 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6211 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6212}
6213
6214
6215/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6216FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6217{
6218 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6219 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6221}
6222
6223
6224/* Opcode 0xf3 0x0f 0x69 - invalid */
6225
6226
6227/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6228 * @note Intel and AMD both use Qd for the second parameter; however, they
6229 *       both list it as an mmX/mem64 operand and Intel describes it as being
6230 *       loaded as a qword, so it should be Qq, shouldn't it? */
6231FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6232{
6233 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6234 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6235}
6236
6237
6238/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6239FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6240{
6241 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6242 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6243}
6244
6245
6246/* Opcode 0xf3 0x0f 0x6a - invalid */
6247
6248
6249/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6250FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6251{
6252 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6253 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6254}
6255
6256
6257/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6258FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6259{
6260 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6261 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6262}
6263
6264
6265/* Opcode 0xf3 0x0f 0x6b - invalid */
6266
6267
6268/* Opcode 0x0f 0x6c - invalid */
6269
6270
6271/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6272FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6273{
6274 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6275 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6276}
6277
6278
6279/* Opcode 0xf3 0x0f 0x6c - invalid */
6280/* Opcode 0xf2 0x0f 0x6c - invalid */
6281
6282
6283/* Opcode 0x0f 0x6d - invalid */
6284
6285
6286/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6287FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6288{
6289 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6290 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6291}
6292
6293
6294/* Opcode 0xf3 0x0f 0x6d - invalid */
6295
6296
6297FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6298{
6299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6300 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6301 {
6302 /**
6303 * @opcode 0x6e
6304 * @opcodesub rex.w=1
6305 * @oppfx none
6306 * @opcpuid mmx
6307 * @opgroup og_mmx_datamove
6308 * @opxcpttype 5
6309 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6310 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6311 */
6312 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6313 if (IEM_IS_MODRM_REG_MODE(bRm))
6314 {
6315 /* MMX, greg64 */
6316 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6318 IEM_MC_LOCAL(uint64_t, u64Tmp);
6319
6320 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6321 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6322 IEM_MC_FPU_TO_MMX_MODE();
6323
6324 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6325 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6326
6327 IEM_MC_ADVANCE_RIP_AND_FINISH();
6328 IEM_MC_END();
6329 }
6330 else
6331 {
6332 /* MMX, [mem64] */
6333 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6335 IEM_MC_LOCAL(uint64_t, u64Tmp);
6336
6337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6339 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6340 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6341 IEM_MC_FPU_TO_MMX_MODE();
6342
6343 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6344 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6345
6346 IEM_MC_ADVANCE_RIP_AND_FINISH();
6347 IEM_MC_END();
6348 }
6349 }
6350 else
6351 {
6352 /**
6353 * @opdone
6354 * @opcode 0x6e
6355 * @opcodesub rex.w=0
6356 * @oppfx none
6357 * @opcpuid mmx
6358 * @opgroup og_mmx_datamove
6359 * @opxcpttype 5
6360 * @opfunction iemOp_movd_q_Pd_Ey
6361 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6362 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6363 */
6364 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6365 if (IEM_IS_MODRM_REG_MODE(bRm))
6366 {
6367 /* MMX, greg32 */
6368 IEM_MC_BEGIN(0, 1, 0, 0);
6369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6370 IEM_MC_LOCAL(uint32_t, u32Tmp);
6371
6372 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6373 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6374 IEM_MC_FPU_TO_MMX_MODE();
6375
6376 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6377 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6378
6379 IEM_MC_ADVANCE_RIP_AND_FINISH();
6380 IEM_MC_END();
6381 }
6382 else
6383 {
6384 /* MMX, [mem32] */
6385 IEM_MC_BEGIN(0, 2, 0, 0);
6386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6387 IEM_MC_LOCAL(uint32_t, u32Tmp);
6388
6389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6391 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6392 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6393 IEM_MC_FPU_TO_MMX_MODE();
6394
6395 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6396 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6397
6398 IEM_MC_ADVANCE_RIP_AND_FINISH();
6399 IEM_MC_END();
6400 }
6401 }
6402}
6403
6404FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6405{
6406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6407 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6408 {
6409 /**
6410 * @opcode 0x6e
6411 * @opcodesub rex.w=1
6412 * @oppfx 0x66
6413 * @opcpuid sse2
6414 * @opgroup og_sse2_simdint_datamove
6415 * @opxcpttype 5
6416 * @optest 64-bit / op1=1 op2=2 -> op1=2
6417 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6418 */
6419 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6420 if (IEM_IS_MODRM_REG_MODE(bRm))
6421 {
6422 /* XMM, greg64 */
6423 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6425 IEM_MC_LOCAL(uint64_t, u64Tmp);
6426
6427 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6428 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6429
6430 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6431 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6432
6433 IEM_MC_ADVANCE_RIP_AND_FINISH();
6434 IEM_MC_END();
6435 }
6436 else
6437 {
6438 /* XMM, [mem64] */
6439 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6441 IEM_MC_LOCAL(uint64_t, u64Tmp);
6442
6443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6445 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6446 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6447
6448 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6449 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6450
6451 IEM_MC_ADVANCE_RIP_AND_FINISH();
6452 IEM_MC_END();
6453 }
6454 }
6455 else
6456 {
6457 /**
6458 * @opdone
6459 * @opcode 0x6e
6460 * @opcodesub rex.w=0
6461 * @oppfx 0x66
6462 * @opcpuid sse2
6463 * @opgroup og_sse2_simdint_datamove
6464 * @opxcpttype 5
6465 * @opfunction iemOp_movd_q_Vy_Ey
6466 * @optest op1=1 op2=2 -> op1=2
6467 * @optest op1=0 op2=-42 -> op1=-42
6468 */
6469 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6470 if (IEM_IS_MODRM_REG_MODE(bRm))
6471 {
6472 /* XMM, greg32 */
6473 IEM_MC_BEGIN(0, 1, 0, 0);
6474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6475 IEM_MC_LOCAL(uint32_t, u32Tmp);
6476
6477 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6478 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6479
6480 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6481 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6482
6483 IEM_MC_ADVANCE_RIP_AND_FINISH();
6484 IEM_MC_END();
6485 }
6486 else
6487 {
6488 /* XMM, [mem32] */
6489 IEM_MC_BEGIN(0, 2, 0, 0);
6490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6491 IEM_MC_LOCAL(uint32_t, u32Tmp);
6492
6493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6497
6498 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6499 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6500
6501 IEM_MC_ADVANCE_RIP_AND_FINISH();
6502 IEM_MC_END();
6503 }
6504 }
6505}
6506
6507/* Opcode 0xf3 0x0f 0x6e - invalid */
6508
6509
6510/**
6511 * @opcode 0x6f
6512 * @oppfx none
6513 * @opcpuid mmx
6514 * @opgroup og_mmx_datamove
6515 * @opxcpttype 5
6516 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6517 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6518 */
6519FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6520{
6521    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6523 if (IEM_IS_MODRM_REG_MODE(bRm))
6524 {
6525 /*
6526 * Register, register.
6527 */
6528 IEM_MC_BEGIN(0, 1, 0, 0);
6529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6530 IEM_MC_LOCAL(uint64_t, u64Tmp);
6531
6532 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6533 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6534 IEM_MC_FPU_TO_MMX_MODE();
6535
6536 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6537 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6538
6539 IEM_MC_ADVANCE_RIP_AND_FINISH();
6540 IEM_MC_END();
6541 }
6542 else
6543 {
6544 /*
6545 * Register, memory.
6546 */
6547 IEM_MC_BEGIN(0, 2, 0, 0);
6548 IEM_MC_LOCAL(uint64_t, u64Tmp);
6549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6550
6551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6553 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6554 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6555 IEM_MC_FPU_TO_MMX_MODE();
6556
6557 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6558 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6559
6560 IEM_MC_ADVANCE_RIP_AND_FINISH();
6561 IEM_MC_END();
6562 }
6563}
6564
6565/**
6566 * @opcode 0x6f
6567 * @oppfx 0x66
6568 * @opcpuid sse2
6569 * @opgroup og_sse2_simdint_datamove
6570 * @opxcpttype 1
6571 * @optest op1=1 op2=2 -> op1=2
6572 * @optest op1=0 op2=-42 -> op1=-42
6573 */
6574FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6575{
6576 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6578 if (IEM_IS_MODRM_REG_MODE(bRm))
6579 {
6580 /*
6581 * Register, register.
6582 */
6583 IEM_MC_BEGIN(0, 0, 0, 0);
6584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6585
6586 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6587 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6588
6589 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6590 IEM_GET_MODRM_RM(pVCpu, bRm));
6591 IEM_MC_ADVANCE_RIP_AND_FINISH();
6592 IEM_MC_END();
6593 }
6594 else
6595 {
6596 /*
6597 * Register, memory.
6598 */
6599 IEM_MC_BEGIN(0, 2, 0, 0);
6600 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6602
6603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6605 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6607
6608 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6609 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6610
6611 IEM_MC_ADVANCE_RIP_AND_FINISH();
6612 IEM_MC_END();
6613 }
6614}
6615
6616/**
6617 * @opcode 0x6f
6618 * @oppfx 0xf3
6619 * @opcpuid sse2
6620 * @opgroup og_sse2_simdint_datamove
6621 * @opxcpttype 4UA
6622 * @optest op1=1 op2=2 -> op1=2
6623 * @optest op1=0 op2=-42 -> op1=-42
6624 */
6625FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6626{
6627 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6629 if (IEM_IS_MODRM_REG_MODE(bRm))
6630 {
6631 /*
6632 * Register, register.
6633 */
6634 IEM_MC_BEGIN(0, 0, 0, 0);
6635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6636 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6638 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6639 IEM_GET_MODRM_RM(pVCpu, bRm));
6640 IEM_MC_ADVANCE_RIP_AND_FINISH();
6641 IEM_MC_END();
6642 }
6643 else
6644 {
6645 /*
6646 * Register, memory.
6647 */
6648 IEM_MC_BEGIN(0, 2, 0, 0);
6649 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6651
6652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6656 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6657 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6658
6659 IEM_MC_ADVANCE_RIP_AND_FINISH();
6660 IEM_MC_END();
6661 }
6662}
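
/* Note: the only functional difference from movdqa above is the unaligned
   IEM_MC_FETCH_MEM_U128 here - movdqa's IEM_MC_FETCH_MEM_U128_ALIGN_SSE
   raises #GP(0) on a misaligned 16-byte access, while movdqu accepts any
   alignment. */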
6663
6664
6665/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6666FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6667{
6668 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6670 if (IEM_IS_MODRM_REG_MODE(bRm))
6671 {
6672 /*
6673 * Register, register.
6674 */
6675 IEM_MC_BEGIN(3, 0, 0, 0);
6676 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6678 IEM_MC_ARG(uint64_t *, pDst, 0);
6679 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6680 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6681 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6682 IEM_MC_PREPARE_FPU_USAGE();
6683 IEM_MC_FPU_TO_MMX_MODE();
6684
6685 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6686 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6687 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6688 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6689
6690 IEM_MC_ADVANCE_RIP_AND_FINISH();
6691 IEM_MC_END();
6692 }
6693 else
6694 {
6695 /*
6696 * Register, memory.
6697 */
6698 IEM_MC_BEGIN(3, 2, 0, 0);
6699 IEM_MC_ARG(uint64_t *, pDst, 0);
6700 IEM_MC_LOCAL(uint64_t, uSrc);
6701 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6703
6704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6705 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6706 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6708 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6709 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6710
6711 IEM_MC_PREPARE_FPU_USAGE();
6712 IEM_MC_FPU_TO_MMX_MODE();
6713
6714 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6715 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6716 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6717
6718 IEM_MC_ADVANCE_RIP_AND_FINISH();
6719 IEM_MC_END();
6720 }
6721}
6722
6723
6724/**
6725 * Common worker for SSE2 instructions on the forms:
6726 * pshufd xmm1, xmm2/mem128, imm8
6727 * pshufhw xmm1, xmm2/mem128, imm8
6728 * pshuflw xmm1, xmm2/mem128, imm8
6729 *
6730 * Proper alignment of the 128-bit operand is enforced.
6731 * Exceptions type 4. SSE2 cpuid checks.
6732 */
6733FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6734{
6735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6736 if (IEM_IS_MODRM_REG_MODE(bRm))
6737 {
6738 /*
6739 * Register, register.
6740 */
6741 IEM_MC_BEGIN(3, 0, 0, 0);
6742 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6744 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6745 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6746 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6747 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6748 IEM_MC_PREPARE_SSE_USAGE();
6749 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6750 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6751 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6752 IEM_MC_ADVANCE_RIP_AND_FINISH();
6753 IEM_MC_END();
6754 }
6755 else
6756 {
6757 /*
6758 * Register, memory.
6759 */
6760 IEM_MC_BEGIN(3, 2, 0, 0);
6761 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6762 IEM_MC_LOCAL(RTUINT128U, uSrc);
6763 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6765
6766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6767 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6768 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6770 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6771
6772 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6773 IEM_MC_PREPARE_SSE_USAGE();
6774 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6775 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6776
6777 IEM_MC_ADVANCE_RIP_AND_FINISH();
6778 IEM_MC_END();
6779 }
6780}
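
/*
 * For reference, the architectural core of PSHUFD, which the common worker
 * above invokes via pfnWorker (iemAImpl_pshufd_u128 in the real code): each
 * two-bit field of the immediate picks a source dword. Plain-C sketch with a
 * made-up name, compiled out.
 */
#if 0
static void iemSketchPshufdU128(uint32_t auDst[4], uint32_t const auSrc[4], uint8_t bImm)
{
    uint32_t auTmp[4]; /* copy first so that auDst == auSrc also works */
    for (unsigned i = 0; i < 4; i++)
        auTmp[i] = auSrc[i];
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auTmp[(bImm >> (i * 2)) & 3];
}
#endif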
6781
6782
6783/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6784FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6785{
6786 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6787 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6788}
6789
6790
6791/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6792FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6793{
6794 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6795 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6796}
6797
6798
6799/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6800FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6801{
6802 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6803 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6804}
6805
6806
6807/**
6808 * Common worker for MMX instructions of the form:
6809 * psrlw mm, imm8
6810 * psraw mm, imm8
6811 * psllw mm, imm8
6812 * psrld mm, imm8
6813 * psrad mm, imm8
6814 * pslld mm, imm8
6815 * psrlq mm, imm8
6816 * psllq mm, imm8
6817 *
6818 */
6819FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6820{
6821 if (IEM_IS_MODRM_REG_MODE(bRm))
6822 {
6823 /*
6824 * Register, immediate.
6825 */
6826 IEM_MC_BEGIN(2, 0, 0, 0);
6827 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6829 IEM_MC_ARG(uint64_t *, pDst, 0);
6830 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6831 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6832 IEM_MC_PREPARE_FPU_USAGE();
6833 IEM_MC_FPU_TO_MMX_MODE();
6834
6835 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6836 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6837 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6838
6839 IEM_MC_ADVANCE_RIP_AND_FINISH();
6840 IEM_MC_END();
6841 }
6842 else
6843 {
6844 /*
6845 * Register, memory not supported.
6846 */
6847 /// @todo Caller already enforced register mode?!
6848 AssertFailedReturn(VINF_SUCCESS);
6849 }
6850}
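
/*
 * For reference, what one of the pfnU64 workers fed to the helper above, say
 * iemAImpl_psrlw_imm_u64, computes: each 16-bit lane is shifted independently
 * and shift counts of 16 or more clear the lane. Plain-C sketch with a
 * made-up name, compiled out.
 */
#if 0
static void iemSketchPsrlwImmU64(uint64_t *puDst, uint8_t bShift)
{
    uint64_t uResult = 0;
    if (bShift < 16)
        for (unsigned iWord = 0; iWord < 4; iWord++)
        {
            uint16_t const uWord = (uint16_t)(*puDst >> (iWord * 16));
            uResult |= (uint64_t)(uint16_t)(uWord >> bShift) << (iWord * 16);
        }
    *puDst = uResult;   /* counts >= 16 leave all lanes zero */
}
#endif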
6851
6852
6853/**
6854 * Common worker for SSE2 instructions of the form:
6855 * psrlw xmm, imm8
6856 * psraw xmm, imm8
6857 * psllw xmm, imm8
6858 * psrld xmm, imm8
6859 * psrad xmm, imm8
6860 * pslld xmm, imm8
6861 * psrlq xmm, imm8
6862 * psllq xmm, imm8
6863 *
6864 */
6865FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6866{
6867 if (IEM_IS_MODRM_REG_MODE(bRm))
6868 {
6869 /*
6870 * Register, immediate.
6871 */
6872 IEM_MC_BEGIN(2, 0, 0, 0);
6873 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6875 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6876 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6877 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6878 IEM_MC_PREPARE_SSE_USAGE();
6879 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6880 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6881 IEM_MC_ADVANCE_RIP_AND_FINISH();
6882 IEM_MC_END();
6883 }
6884 else
6885 {
6886 /*
6887 * Register, memory.
6888 */
6889 /// @todo Caller already enforced register mode?!
6890 AssertFailedReturn(VINF_SUCCESS);
6891 }
6892}
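
/*
 * The SSE2-only PSRLDQ (Grp14 /3 below) is the odd one out among these
 * shifts: it shifts whole bytes rather than bits, and counts above 15 clear
 * the register. Plain-C sketch of what iemAImpl_psrldq_imm_u128 computes,
 * with a made-up name, compiled out.
 */
#if 0
static void iemSketchPsrldqImmU128(uint8_t abDst[16], uint8_t bShift)
{
    uint8_t abTmp[16];
    for (unsigned i = 0; i < 16; i++)
        abTmp[i] = abDst[i];
    for (unsigned i = 0; i < 16; i++)   /* little endian: byte 0 is least significant */
        abDst[i] = (unsigned)bShift + i < 16 ? abTmp[i + bShift] : 0;
}
#endif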
6893
6894
6895/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6896FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6897{
6898// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6899 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6900}
6901
6902
6903/** Opcode 0x66 0x0f 0x71 11/2. */
6904FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6905{
6906// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6907 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6908}
6909
6910
6911/** Opcode 0x0f 0x71 11/4. */
6912FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6913{
6914// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6915 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6916}
6917
6918
6919/** Opcode 0x66 0x0f 0x71 11/4. */
6920FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6921{
6922// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6923 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6924}
6925
6926
6927/** Opcode 0x0f 0x71 11/6. */
6928FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6929{
6930// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6931 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6932}
6933
6934
6935/** Opcode 0x66 0x0f 0x71 11/6. */
6936FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6937{
6938// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6939 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6940}
6941
6942
6943/**
6944 * Group 12 jump table for register variant.
6945 */
6946IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6947{
6948 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6949 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6950 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6951 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6952 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6953 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6954 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6955 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6956};
6957AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
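
/* Note: the four columns per /r in the table above (and in g_apfnGroup13RegReg
   and the three-byte escape tables) are indexed by pVCpu->iem.s.idxPrefix:
   0 = no prefix (MMX form), 1 = 0x66 (SSE form), 2 = 0xf3, 3 = 0xf2. */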
6958
6959
6960/** Opcode 0x0f 0x71. */
6961FNIEMOP_DEF(iemOp_Grp12)
6962{
6963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6964 if (IEM_IS_MODRM_REG_MODE(bRm))
6965 /* register, register */
6966 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6967 + pVCpu->iem.s.idxPrefix], bRm);
6968 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6969}
6970
6971
6972/** Opcode 0x0f 0x72 11/2. */
6973FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6974{
6975// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6976 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6977}
6978
6979
6980/** Opcode 0x66 0x0f 0x72 11/2. */
6981FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6982{
6983// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6984 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6985}
6986
6987
6988/** Opcode 0x0f 0x72 11/4. */
6989FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6990{
6991// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6992 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6993}
6994
6995
6996/** Opcode 0x66 0x0f 0x72 11/4. */
6997FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6998{
6999// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7000 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
7001}
7002
7003
7004/** Opcode 0x0f 0x72 11/6. */
7005FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
7006{
7007// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7008 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
7009}
7010
7011/** Opcode 0x66 0x0f 0x72 11/6. */
7012FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
7013{
7014// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7015 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
7016}
7017
7018
7019/**
7020 * Group 13 jump table for register variant.
7021 */
7022IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
7023{
7024 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7025 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7026 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7027 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7028 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7029 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7030 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7031 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
7032};
7033AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
7034
7035/** Opcode 0x0f 0x72. */
7036FNIEMOP_DEF(iemOp_Grp13)
7037{
7038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7039 if (IEM_IS_MODRM_REG_MODE(bRm))
7040 /* register, register */
7041 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7042 + pVCpu->iem.s.idxPrefix], bRm);
7043 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7044}
7045
7046
7047/** Opcode 0x0f 0x73 11/2. */
7048FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
7049{
7050// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7051 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
7052}
7053
7054
7055/** Opcode 0x66 0x0f 0x73 11/2. */
7056FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
7057{
7058// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7059 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
7060}
7061
7062
7063/** Opcode 0x66 0x0f 0x73 11/3. */
7064FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
7065{
7066// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7067 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
7068}
7069
7070
7071/** Opcode 0x0f 0x73 11/6. */
7072FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
7073{
7074// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7075 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
7076}
7077
7078
7079/** Opcode 0x66 0x0f 0x73 11/6. */
7080FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
7081{
7082// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7083 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
7084}
7085
7086
7087/** Opcode 0x66 0x0f 0x73 11/7. */
7088FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
7089{
7090// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7091 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
7092}
7093
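/*
 * Unlike the word/dword/qword shifts above, psrldq/pslldq shift the whole
 * 128-bit register by imm8 *bytes*, and they only exist in 0x66-prefixed
 * (XMM) form, hence the lone valid column in rows /3 and /7 below.  A
 * reference sketch of psrldq (little-endian byte array, illustrative only):
 *
 * @code
 *  static void psrldqRef(uint8_t abXmm[16], uint8_t bImm)
 *  {
 *      for (unsigned i = 0; i < 16; i++)  // bImm > 15 zeroes everything
 *          abXmm[i] = i + bImm < 16 ? abXmm[i + bImm] : 0;
 *  }
 * @endcode
 */
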
7094/**
7095 * Group 14 jump table for register variant.
7096 */
7097IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
7098{
7099 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7100 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7101 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7102 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7103 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7104 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7105 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7106 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7107};
7108AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7109
7110
7111/** Opcode 0x0f 0x73. */
7112FNIEMOP_DEF(iemOp_Grp14)
7113{
7114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7115 if (IEM_IS_MODRM_REG_MODE(bRm))
7116 /* register, register */
7117 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7118 + pVCpu->iem.s.idxPrefix], bRm);
7119 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7120}
7121
7122
7123/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7124FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7125{
7126 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7127 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7128}
7129
7130
7131/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7132FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7133{
7134 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7135 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7136}
7137
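/*
 * The pcmpeqb/pcmpeqw/pcmpeqd workers build an element-wise mask: all ones
 * where the elements compare equal, all zeroes where they differ.  Byte
 * variant as a sketch (using the RTUINT128U byte view):
 *
 * @code
 *  static void pcmpeqbRef(RTUINT128U *puDst, RTUINT128U const *puSrc)
 *  {
 *      for (unsigned i = 0; i < 16; i++)
 *          puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
 *  }
 * @endcode
 */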
7138
7139/* Opcode 0xf3 0x0f 0x74 - invalid */
7140/* Opcode 0xf2 0x0f 0x74 - invalid */
7141
7142
7143/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7144FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7145{
7146 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7147 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7148}
7149
7150
7151/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7152FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7153{
7154 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7155 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7156}
7157
7158
7159/* Opcode 0xf3 0x0f 0x75 - invalid */
7160/* Opcode 0xf2 0x0f 0x75 - invalid */
7161
7162
7163/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7164FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7165{
7166 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7167 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7168}
7169
7170
7171/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7172FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7173{
7174 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7175 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7176}
7177
7178
7179/* Opcode 0xf3 0x0f 0x76 - invalid */
7180/* Opcode 0xf2 0x0f 0x76 - invalid */
7181
7182
7183/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7184FNIEMOP_DEF(iemOp_emms)
7185{
7186 IEMOP_MNEMONIC(emms, "emms");
7187 IEM_MC_BEGIN(0, 0, 0, 0);
7188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7189 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7190 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7191 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7192 IEM_MC_FPU_FROM_MMX_MODE();
7193 IEM_MC_ADVANCE_RIP_AND_FINISH();
7194 IEM_MC_END();
7195}
7196
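/*
 * Note: emms marks all eight x87 registers as empty (tag word 0xFFFF, i.e.
 * an abridged FXSAVE FTW of 0) so plain x87 code can follow MMX code; it is
 * the counterpart of the implicit IEM_MC_FPU_TO_MMX_MODE switch performed
 * by the MMX instructions above.
 */
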
7197/* Opcode 0x66 0x0f 0x77 - invalid */
7198/* Opcode 0xf3 0x0f 0x77 - invalid */
7199/* Opcode 0xf2 0x0f 0x77 - invalid */
7200
7201/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7202#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7203FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7204{
7205 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7206 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7207 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7208 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7209
7210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7211 if (IEM_IS_MODRM_REG_MODE(bRm))
7212 {
7213 /*
7214 * Register, register.
7215 */
7216 if (enmEffOpSize == IEMMODE_64BIT)
7217 {
7218 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7219 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7220 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7221 IEM_MC_ARG(uint64_t, u64Enc, 1);
7222 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7223 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7224 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7225 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
7226 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7227 IEM_MC_END();
7228 }
7229 else
7230 {
7231 IEM_MC_BEGIN(2, 0, 0, 0);
7232 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7234 IEM_MC_ARG(uint32_t, u32Enc, 1);
7235 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7236 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7237 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7238 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
7239 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7240 IEM_MC_END();
7241 }
7242 }
7243 else
7244 {
7245 /*
7246 * Memory, register.
7247 */
7248 if (enmEffOpSize == IEMMODE_64BIT)
7249 {
7250 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7251 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7253 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7254 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7255 IEM_MC_ARG(uint64_t, u64Enc, 2);
7256 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7257 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7258 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7259 IEM_MC_END();
7260 }
7261 else
7262 {
7263 IEM_MC_BEGIN(3, 0, 0, 0);
7264 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7266 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7267 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7268 IEM_MC_ARG(uint32_t, u32Enc, 2);
7269 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7270 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7271 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7272 IEM_MC_END();
7273 }
7274 }
7275}
7276#else
7277FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7278#endif
7279
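/*
 * Note: vmread (and vmwrite below) ignore the operand-size prefix; the width
 * is fixed at 64 bits in long mode and 32 bits elsewhere (see the
 * enmEffOpSize selection above), while 0x66/0xF3/0xF2 prefixed forms are
 * rejected by IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES.
 */
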
7280/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7281FNIEMOP_STUB(iemOp_AmdGrp17);
7282/* Opcode 0xf3 0x0f 0x78 - invalid */
7283/* Opcode 0xf2 0x0f 0x78 - invalid */
7284
7285/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7286#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7287FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7288{
7289 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7290 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7291 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7292 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7293
7294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7295 if (IEM_IS_MODRM_REG_MODE(bRm))
7296 {
7297 /*
7298 * Register, register.
7299 */
7300 if (enmEffOpSize == IEMMODE_64BIT)
7301 {
7302 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7303 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7304 IEM_MC_ARG(uint64_t, u64Val, 0);
7305 IEM_MC_ARG(uint64_t, u64Enc, 1);
7306 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7307 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7308 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7309 IEM_MC_END();
7310 }
7311 else
7312 {
7313 IEM_MC_BEGIN(2, 0, 0, 0);
7314 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7315 IEM_MC_ARG(uint32_t, u32Val, 0);
7316 IEM_MC_ARG(uint32_t, u32Enc, 1);
7317 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7318 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7319 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7320 IEM_MC_END();
7321 }
7322 }
7323 else
7324 {
7325 /*
7326 * Register, memory.
7327 */
7328 if (enmEffOpSize == IEMMODE_64BIT)
7329 {
7330 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7331 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7333 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7334 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7335 IEM_MC_ARG(uint64_t, u64Enc, 2);
7336 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7337 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7338 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7339 IEM_MC_END();
7340 }
7341 else
7342 {
7343 IEM_MC_BEGIN(3, 0, 0, 0);
7344 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7346 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7347 IEM_MC_ARG(uint32_t, u32Enc, 2);
7348 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7349 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7350 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7351 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7352 IEM_MC_END();
7353 }
7354 }
7355}
7356#else
7357FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7358#endif
7359/* Opcode 0x66 0x0f 0x79 - invalid */
7360/* Opcode 0xf3 0x0f 0x79 - invalid */
7361/* Opcode 0xf2 0x0f 0x79 - invalid */
7362
7363/* Opcode 0x0f 0x7a - invalid */
7364/* Opcode 0x66 0x0f 0x7a - invalid */
7365/* Opcode 0xf3 0x0f 0x7a - invalid */
7366/* Opcode 0xf2 0x0f 0x7a - invalid */
7367
7368/* Opcode 0x0f 0x7b - invalid */
7369/* Opcode 0x66 0x0f 0x7b - invalid */
7370/* Opcode 0xf3 0x0f 0x7b - invalid */
7371/* Opcode 0xf2 0x0f 0x7b - invalid */
7372
7373/* Opcode 0x0f 0x7c - invalid */
7374
7375
7376/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7377FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7378{
7379 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7380 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7381}
7382
7383
7384/* Opcode 0xf3 0x0f 0x7c - invalid */
7385
7386
7387/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7388FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7389{
7390 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7391 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7392}
7393
7394
7395/* Opcode 0x0f 0x7d - invalid */
7396
7397
7398/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7399FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7400{
7401 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7402 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7403}
7404
7405
7406/* Opcode 0xf3 0x0f 0x7d - invalid */
7407
7408
7409/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7410FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7411{
7412 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7413 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7414}
7415
7416
7417/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7418FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7419{
7420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7421 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7422 {
7423 /**
7424 * @opcode 0x7e
7425 * @opcodesub rex.w=1
7426 * @oppfx none
7427 * @opcpuid mmx
7428 * @opgroup og_mmx_datamove
7429 * @opxcpttype 5
7430 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7431 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7432 */
7433 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7434 if (IEM_IS_MODRM_REG_MODE(bRm))
7435 {
7436 /* greg64, MMX */
7437 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7439 IEM_MC_LOCAL(uint64_t, u64Tmp);
7440
7441 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7442 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7443 IEM_MC_FPU_TO_MMX_MODE();
7444
7445 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7446 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7447
7448 IEM_MC_ADVANCE_RIP_AND_FINISH();
7449 IEM_MC_END();
7450 }
7451 else
7452 {
7453 /* [mem64], MMX */
7454 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7456 IEM_MC_LOCAL(uint64_t, u64Tmp);
7457
7458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7460 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7461 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7462 IEM_MC_FPU_TO_MMX_MODE();
7463
7464 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7465 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7466
7467 IEM_MC_ADVANCE_RIP_AND_FINISH();
7468 IEM_MC_END();
7469 }
7470 }
7471 else
7472 {
7473 /**
7474 * @opdone
7475 * @opcode 0x7e
7476 * @opcodesub rex.w=0
7477 * @oppfx none
7478 * @opcpuid mmx
7479 * @opgroup og_mmx_datamove
7480 * @opxcpttype 5
7481 * @opfunction iemOp_movd_q_Ey_Pd
7482 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7483 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7484 */
7485 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7486 if (IEM_IS_MODRM_REG_MODE(bRm))
7487 {
7488 /* greg32, MMX */
7489 IEM_MC_BEGIN(0, 1, 0, 0);
7490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7491 IEM_MC_LOCAL(uint32_t, u32Tmp);
7492
7493 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7494 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7495 IEM_MC_FPU_TO_MMX_MODE();
7496
7497 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7498 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7499
7500 IEM_MC_ADVANCE_RIP_AND_FINISH();
7501 IEM_MC_END();
7502 }
7503 else
7504 {
7505 /* [mem32], MMX */
7506 IEM_MC_BEGIN(0, 2, 0, 0);
7507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7508 IEM_MC_LOCAL(uint32_t, u32Tmp);
7509
7510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7512 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7513 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7514 IEM_MC_FPU_TO_MMX_MODE();
7515
7516 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7517 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7518
7519 IEM_MC_ADVANCE_RIP_AND_FINISH();
7520 IEM_MC_END();
7521 }
7522 }
7523}
7524
7525
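/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */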
7526FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7527{
7528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7529 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7530 {
7531 /**
7532 * @opcode 0x7e
7533 * @opcodesub rex.w=1
7534 * @oppfx 0x66
7535 * @opcpuid sse2
7536 * @opgroup og_sse2_simdint_datamove
7537 * @opxcpttype 5
7538 * @optest 64-bit / op1=1 op2=2 -> op1=2
7539 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7540 */
7541 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7542 if (IEM_IS_MODRM_REG_MODE(bRm))
7543 {
7544 /* greg64, XMM */
7545 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7547 IEM_MC_LOCAL(uint64_t, u64Tmp);
7548
7549 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7550 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7551
7552 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7553 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7554
7555 IEM_MC_ADVANCE_RIP_AND_FINISH();
7556 IEM_MC_END();
7557 }
7558 else
7559 {
7560 /* [mem64], XMM */
7561 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7563 IEM_MC_LOCAL(uint64_t, u64Tmp);
7564
7565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7568 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7569
7570 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7571 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7572
7573 IEM_MC_ADVANCE_RIP_AND_FINISH();
7574 IEM_MC_END();
7575 }
7576 }
7577 else
7578 {
7579 /**
7580 * @opdone
7581 * @opcode 0x7e
7582 * @opcodesub rex.w=0
7583 * @oppfx 0x66
7584 * @opcpuid sse2
7585 * @opgroup og_sse2_simdint_datamove
7586 * @opxcpttype 5
7587 * @opfunction iemOp_movd_q_Ey_Vy
7588 * @optest op1=1 op2=2 -> op1=2
7589 * @optest op1=0 op2=-42 -> op1=-42
7590 */
7591 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7592 if (IEM_IS_MODRM_REG_MODE(bRm))
7593 {
7594 /* greg32, XMM */
7595 IEM_MC_BEGIN(0, 1, 0, 0);
7596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7597 IEM_MC_LOCAL(uint32_t, u32Tmp);
7598
7599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7600 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7601
7602 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7603 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7604
7605 IEM_MC_ADVANCE_RIP_AND_FINISH();
7606 IEM_MC_END();
7607 }
7608 else
7609 {
7610 /* [mem32], XMM */
7611 IEM_MC_BEGIN(0, 2, 0, 0);
7612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7613 IEM_MC_LOCAL(uint32_t, u32Tmp);
7614
7615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7617 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7618 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7619
7620 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7621 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7622
7623 IEM_MC_ADVANCE_RIP_AND_FINISH();
7624 IEM_MC_END();
7625 }
7626 }
7627}
7628
7629/**
7630 * @opcode 0x7e
7631 * @oppfx 0xf3
7632 * @opcpuid sse2
7633 * @opgroup og_sse2_pcksclr_datamove
7634 * @opxcpttype none
7635 * @optest op1=1 op2=2 -> op1=2
7636 * @optest op1=0 op2=-42 -> op1=-42
7637 */
7638FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7639{
7640 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7642 if (IEM_IS_MODRM_REG_MODE(bRm))
7643 {
7644 /*
7645 * XMM128, XMM64.
7646 */
7647 IEM_MC_BEGIN(0, 2, 0, 0);
7648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7649 IEM_MC_LOCAL(uint64_t, uSrc);
7650
7651 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7653
7654 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7655 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7656
7657 IEM_MC_ADVANCE_RIP_AND_FINISH();
7658 IEM_MC_END();
7659 }
7660 else
7661 {
7662 /*
7663 * XMM128, [mem64].
7664 */
7665 IEM_MC_BEGIN(0, 2, 0, 0);
7666 IEM_MC_LOCAL(uint64_t, uSrc);
7667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7668
7669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7671 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7672 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7673
7674 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7675 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7676
7677 IEM_MC_ADVANCE_RIP_AND_FINISH();
7678 IEM_MC_END();
7679 }
7680}
7681
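/*
 * Note: movq Vq,Wq copies only the low quadword of the source and always
 * zero-extends the destination to the full 128 bits
 * (IEM_MC_STORE_XREG_U64_ZX_U128), roughly:
 *
 * @code
 *  puDst->au64[0] = puSrc->au64[0];
 *  puDst->au64[1] = 0;
 * @endcode
 */
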
7682/* Opcode 0xf2 0x0f 0x7e - invalid */
7683
7684
7685/** Opcode 0x0f 0x7f - movq Qq, Pq */
7686FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7687{
7688 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7690 if (IEM_IS_MODRM_REG_MODE(bRm))
7691 {
7692 /*
7693 * MMX, MMX.
7694 */
7695 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7696 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7697 IEM_MC_BEGIN(0, 1, 0, 0);
7698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7699 IEM_MC_LOCAL(uint64_t, u64Tmp);
7700 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7701 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7702 IEM_MC_FPU_TO_MMX_MODE();
7703
7704 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7705 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7706
7707 IEM_MC_ADVANCE_RIP_AND_FINISH();
7708 IEM_MC_END();
7709 }
7710 else
7711 {
7712 /*
7713 * [mem64], MMX.
7714 */
7715 IEM_MC_BEGIN(0, 2, 0, 0);
7716 IEM_MC_LOCAL(uint64_t, u64Tmp);
7717 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7718
7719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7721 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7722 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7723 IEM_MC_FPU_TO_MMX_MODE();
7724
7725 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7726 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7727
7728 IEM_MC_ADVANCE_RIP_AND_FINISH();
7729 IEM_MC_END();
7730 }
7731}
7732
7733/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7734FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7735{
7736 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7738 if (IEM_IS_MODRM_REG_MODE(bRm))
7739 {
7740 /*
7741 * XMM, XMM.
7742 */
7743 IEM_MC_BEGIN(0, 0, 0, 0);
7744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7745 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7746 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7747 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7748 IEM_GET_MODRM_REG(pVCpu, bRm));
7749 IEM_MC_ADVANCE_RIP_AND_FINISH();
7750 IEM_MC_END();
7751 }
7752 else
7753 {
7754 /*
7755 * [mem128], XMM.
7756 */
7757 IEM_MC_BEGIN(0, 2, 0, 0);
7758 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7760
7761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7763 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7764 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7765
7766 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7767 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7768
7769 IEM_MC_ADVANCE_RIP_AND_FINISH();
7770 IEM_MC_END();
7771 }
7772}
7773
7774/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7775FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7776{
7777 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7779 if (IEM_IS_MODRM_REG_MODE(bRm))
7780 {
7781 /*
7782 * XMM, XMM.
7783 */
7784 IEM_MC_BEGIN(0, 0, 0, 0);
7785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7786 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7787 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7788 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7789 IEM_GET_MODRM_REG(pVCpu, bRm));
7790 IEM_MC_ADVANCE_RIP_AND_FINISH();
7791 IEM_MC_END();
7792 }
7793 else
7794 {
7795 /*
7796 * [mem128], XMM.
7797 */
7798 IEM_MC_BEGIN(0, 2, 0, 0);
7799 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7801
7802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7804 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7805 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7806
7807 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7808 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7809
7810 IEM_MC_ADVANCE_RIP_AND_FINISH();
7811 IEM_MC_END();
7812 }
7813}
7814
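/*
 * Note: the only difference between the movdqa and movdqu stores above is
 * alignment checking: movdqa goes through IEM_MC_STORE_MEM_U128_ALIGN_SSE,
 * which faults (#GP(0)) on a misaligned 16-byte operand, whereas movdqu
 * uses the plain unaligned IEM_MC_STORE_MEM_U128.
 */
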
7815/* Opcode 0xf2 0x0f 0x7f - invalid */
7816
7817
7818/**
7819 * @opcode 0x80
7820 * @opfltest of
7821 */
7822FNIEMOP_DEF(iemOp_jo_Jv)
7823{
7824 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7825 IEMOP_HLP_MIN_386();
7826 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7827 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7828 {
7829 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7830 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7833 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7834 } IEM_MC_ELSE() {
7835 IEM_MC_ADVANCE_RIP_AND_FINISH();
7836 } IEM_MC_ENDIF();
7837 IEM_MC_END();
7838 }
7839 else
7840 {
7841 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7842 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7844 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7845 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7846 } IEM_MC_ELSE() {
7847 IEM_MC_ADVANCE_RIP_AND_FINISH();
7848 } IEM_MC_ENDIF();
7849 IEM_MC_END();
7850 }
7851}
7852
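/*
 * Note: all sixteen 0x0f 0x80..0x8f Jcc forms follow this pattern: a signed
 * 16-bit or 32-bit displacement, chosen by the effective operand size, is
 * added to the RIP of the next instruction.  As a sketch (names
 * illustrative):
 *
 * @code
 *  uint64_t const uNewRip = uRipAfterInstr + (int64_t)i32Imm; // sign-extend
 * @endcode
 *
 * In 64-bit mode the operand size defaults to 64 bits (the displacement
 * stays 32-bit) and Intel CPUs ignore the 0x66 prefix, cf.
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX.
 */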
7853
7854/**
7855 * @opcode 0x81
7856 * @opfltest of
7857 */
7858FNIEMOP_DEF(iemOp_jno_Jv)
7859{
7860 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7861 IEMOP_HLP_MIN_386();
7862 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7863 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7864 {
7865 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7866 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7869 IEM_MC_ADVANCE_RIP_AND_FINISH();
7870 } IEM_MC_ELSE() {
7871 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7872 } IEM_MC_ENDIF();
7873 IEM_MC_END();
7874 }
7875 else
7876 {
7877 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7878 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7880 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7881 IEM_MC_ADVANCE_RIP_AND_FINISH();
7882 } IEM_MC_ELSE() {
7883 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7884 } IEM_MC_ENDIF();
7885 IEM_MC_END();
7886 }
7887}
7888
7889
7890/**
7891 * @opcode 0x82
7892 * @opfltest cf
7893 */
7894FNIEMOP_DEF(iemOp_jc_Jv)
7895{
7896 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7897 IEMOP_HLP_MIN_386();
7898 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7899 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7900 {
7901 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7902 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7905 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7906 } IEM_MC_ELSE() {
7907 IEM_MC_ADVANCE_RIP_AND_FINISH();
7908 } IEM_MC_ENDIF();
7909 IEM_MC_END();
7910 }
7911 else
7912 {
7913 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7914 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7916 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7917 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7918 } IEM_MC_ELSE() {
7919 IEM_MC_ADVANCE_RIP_AND_FINISH();
7920 } IEM_MC_ENDIF();
7921 IEM_MC_END();
7922 }
7923}
7924
7925
7926/**
7927 * @opcode 0x83
7928 * @opfltest cf
7929 */
7930FNIEMOP_DEF(iemOp_jnc_Jv)
7931{
7932 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7933 IEMOP_HLP_MIN_386();
7934 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7935 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7936 {
7937 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7938 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7941 IEM_MC_ADVANCE_RIP_AND_FINISH();
7942 } IEM_MC_ELSE() {
7943 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7944 } IEM_MC_ENDIF();
7945 IEM_MC_END();
7946 }
7947 else
7948 {
7949 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7950 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7953 IEM_MC_ADVANCE_RIP_AND_FINISH();
7954 } IEM_MC_ELSE() {
7955 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7956 } IEM_MC_ENDIF();
7957 IEM_MC_END();
7958 }
7959}
7960
7961
7962/**
7963 * @opcode 0x84
7964 * @opfltest zf
7965 */
7966FNIEMOP_DEF(iemOp_je_Jv)
7967{
7968 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7969 IEMOP_HLP_MIN_386();
7970 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7971 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7972 {
7973 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7974 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7977 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7978 } IEM_MC_ELSE() {
7979 IEM_MC_ADVANCE_RIP_AND_FINISH();
7980 } IEM_MC_ENDIF();
7981 IEM_MC_END();
7982 }
7983 else
7984 {
7985 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7986 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7988 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7989 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7990 } IEM_MC_ELSE() {
7991 IEM_MC_ADVANCE_RIP_AND_FINISH();
7992 } IEM_MC_ENDIF();
7993 IEM_MC_END();
7994 }
7995}
7996
7997
7998/**
7999 * @opcode 0x85
8000 * @opfltest zf
8001 */
8002FNIEMOP_DEF(iemOp_jne_Jv)
8003{
8004 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
8005 IEMOP_HLP_MIN_386();
8006 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8007 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8008 {
8009 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8010 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8013 IEM_MC_ADVANCE_RIP_AND_FINISH();
8014 } IEM_MC_ELSE() {
8015 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8016 } IEM_MC_ENDIF();
8017 IEM_MC_END();
8018 }
8019 else
8020 {
8021 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8022 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8025 IEM_MC_ADVANCE_RIP_AND_FINISH();
8026 } IEM_MC_ELSE() {
8027 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8028 } IEM_MC_ENDIF();
8029 IEM_MC_END();
8030 }
8031}
8032
8033
8034/**
8035 * @opcode 0x86
8036 * @opfltest cf,zf
8037 */
8038FNIEMOP_DEF(iemOp_jbe_Jv)
8039{
8040 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
8041 IEMOP_HLP_MIN_386();
8042 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8043 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8044 {
8045 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8046 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8048 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8049 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8050 } IEM_MC_ELSE() {
8051 IEM_MC_ADVANCE_RIP_AND_FINISH();
8052 } IEM_MC_ENDIF();
8053 IEM_MC_END();
8054 }
8055 else
8056 {
8057 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8058 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8060 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8061 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8062 } IEM_MC_ELSE() {
8063 IEM_MC_ADVANCE_RIP_AND_FINISH();
8064 } IEM_MC_ENDIF();
8065 IEM_MC_END();
8066 }
8067}
8068
8069
8070/**
8071 * @opcode 0x87
8072 * @opfltest cf,zf
8073 */
8074FNIEMOP_DEF(iemOp_jnbe_Jv)
8075{
8076 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
8077 IEMOP_HLP_MIN_386();
8078 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8079 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8080 {
8081 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8082 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8084 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8085 IEM_MC_ADVANCE_RIP_AND_FINISH();
8086 } IEM_MC_ELSE() {
8087 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8088 } IEM_MC_ENDIF();
8089 IEM_MC_END();
8090 }
8091 else
8092 {
8093 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8094 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8096 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8097 IEM_MC_ADVANCE_RIP_AND_FINISH();
8098 } IEM_MC_ELSE() {
8099 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8100 } IEM_MC_ENDIF();
8101 IEM_MC_END();
8102 }
8103}
8104
8105
8106/**
8107 * @opcode 0x88
8108 * @opfltest sf
8109 */
8110FNIEMOP_DEF(iemOp_js_Jv)
8111{
8112 IEMOP_MNEMONIC(js_Jv, "js Jv");
8113 IEMOP_HLP_MIN_386();
8114 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8115 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8116 {
8117 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8118 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8121 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8122 } IEM_MC_ELSE() {
8123 IEM_MC_ADVANCE_RIP_AND_FINISH();
8124 } IEM_MC_ENDIF();
8125 IEM_MC_END();
8126 }
8127 else
8128 {
8129 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8130 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8132 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8133 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8134 } IEM_MC_ELSE() {
8135 IEM_MC_ADVANCE_RIP_AND_FINISH();
8136 } IEM_MC_ENDIF();
8137 IEM_MC_END();
8138 }
8139}
8140
8141
8142/**
8143 * @opcode 0x89
8144 * @opfltest sf
8145 */
8146FNIEMOP_DEF(iemOp_jns_Jv)
8147{
8148 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8149 IEMOP_HLP_MIN_386();
8150 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8151 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8152 {
8153 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8154 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8156 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8157 IEM_MC_ADVANCE_RIP_AND_FINISH();
8158 } IEM_MC_ELSE() {
8159 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8160 } IEM_MC_ENDIF();
8161 IEM_MC_END();
8162 }
8163 else
8164 {
8165 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8166 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8168 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8169 IEM_MC_ADVANCE_RIP_AND_FINISH();
8170 } IEM_MC_ELSE() {
8171 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8172 } IEM_MC_ENDIF();
8173 IEM_MC_END();
8174 }
8175}
8176
8177
8178/**
8179 * @opcode 0x8a
8180 * @opfltest pf
8181 */
8182FNIEMOP_DEF(iemOp_jp_Jv)
8183{
8184 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8185 IEMOP_HLP_MIN_386();
8186 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8187 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8188 {
8189 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8190 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8192 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8193 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8194 } IEM_MC_ELSE() {
8195 IEM_MC_ADVANCE_RIP_AND_FINISH();
8196 } IEM_MC_ENDIF();
8197 IEM_MC_END();
8198 }
8199 else
8200 {
8201 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8202 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8205 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8206 } IEM_MC_ELSE() {
8207 IEM_MC_ADVANCE_RIP_AND_FINISH();
8208 } IEM_MC_ENDIF();
8209 IEM_MC_END();
8210 }
8211}
8212
8213
8214/**
8215 * @opcode 0x8b
8216 * @opfltest pf
8217 */
8218FNIEMOP_DEF(iemOp_jnp_Jv)
8219{
8220 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8221 IEMOP_HLP_MIN_386();
8222 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8223 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8224 {
8225 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8226 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8228 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8229 IEM_MC_ADVANCE_RIP_AND_FINISH();
8230 } IEM_MC_ELSE() {
8231 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8232 } IEM_MC_ENDIF();
8233 IEM_MC_END();
8234 }
8235 else
8236 {
8237 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8238 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8240 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8241 IEM_MC_ADVANCE_RIP_AND_FINISH();
8242 } IEM_MC_ELSE() {
8243 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8244 } IEM_MC_ENDIF();
8245 IEM_MC_END();
8246 }
8247}
8248
8249
8250/**
8251 * @opcode 0x8c
8252 * @opfltest sf,of
8253 */
8254FNIEMOP_DEF(iemOp_jl_Jv)
8255{
8256 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8257 IEMOP_HLP_MIN_386();
8258 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8259 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8260 {
8261 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8262 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8264 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8265 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8266 } IEM_MC_ELSE() {
8267 IEM_MC_ADVANCE_RIP_AND_FINISH();
8268 } IEM_MC_ENDIF();
8269 IEM_MC_END();
8270 }
8271 else
8272 {
8273 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8274 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8276 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8277 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8278 } IEM_MC_ELSE() {
8279 IEM_MC_ADVANCE_RIP_AND_FINISH();
8280 } IEM_MC_ENDIF();
8281 IEM_MC_END();
8282 }
8283}
8284
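/*
 * Note: the signed conditions map onto EFLAGS as follows: jl/jnge takes the
 * branch when SF != OF (IEM_MC_IF_EFL_BITS_NE above), and jle/jng (0x8e)
 * below additionally branches when ZF is set.  As a sketch (fEfl
 * illustrative):
 *
 * @code
 *  bool const fJl  = !!(fEfl & X86_EFL_SF) != !!(fEfl & X86_EFL_OF);
 *  bool const fJle = fJl || (fEfl & X86_EFL_ZF) != 0;
 * @endcode
 */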
8285
8286/**
8287 * @opcode 0x8d
8288 * @opfltest sf,of
8289 */
8290FNIEMOP_DEF(iemOp_jnl_Jv)
8291{
8292 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8293 IEMOP_HLP_MIN_386();
8294 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8295 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8296 {
8297 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8298 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8300 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8301 IEM_MC_ADVANCE_RIP_AND_FINISH();
8302 } IEM_MC_ELSE() {
8303 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8304 } IEM_MC_ENDIF();
8305 IEM_MC_END();
8306 }
8307 else
8308 {
8309 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8310 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8312 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8313 IEM_MC_ADVANCE_RIP_AND_FINISH();
8314 } IEM_MC_ELSE() {
8315 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8316 } IEM_MC_ENDIF();
8317 IEM_MC_END();
8318 }
8319}
8320
8321
8322/**
8323 * @opcode 0x8e
8324 * @opfltest zf,sf,of
8325 */
8326FNIEMOP_DEF(iemOp_jle_Jv)
8327{
8328 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8329 IEMOP_HLP_MIN_386();
8330 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8331 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8332 {
8333 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8334 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8336 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8337 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8338 } IEM_MC_ELSE() {
8339 IEM_MC_ADVANCE_RIP_AND_FINISH();
8340 } IEM_MC_ENDIF();
8341 IEM_MC_END();
8342 }
8343 else
8344 {
8345 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8346 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8348 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8349 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8350 } IEM_MC_ELSE() {
8351 IEM_MC_ADVANCE_RIP_AND_FINISH();
8352 } IEM_MC_ENDIF();
8353 IEM_MC_END();
8354 }
8355}
8356
8357
8358/**
8359 * @opcode 0x8f
8360 * @opfltest zf,sf,of
8361 */
8362FNIEMOP_DEF(iemOp_jnle_Jv)
8363{
8364 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8365 IEMOP_HLP_MIN_386();
8366 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8367 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8368 {
8369 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8370 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8372 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8373 IEM_MC_ADVANCE_RIP_AND_FINISH();
8374 } IEM_MC_ELSE() {
8375 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8376 } IEM_MC_ENDIF();
8377 IEM_MC_END();
8378 }
8379 else
8380 {
8381 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8382 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8384 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8385 IEM_MC_ADVANCE_RIP_AND_FINISH();
8386 } IEM_MC_ELSE() {
8387 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8388 } IEM_MC_ENDIF();
8389 IEM_MC_END();
8390 }
8391}
8392
8393
8394/**
8395 * @opcode 0x90
8396 * @opfltest of
8397 */
8398FNIEMOP_DEF(iemOp_seto_Eb)
8399{
8400 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8401 IEMOP_HLP_MIN_386();
8402 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8403
8404 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8405 * any way. AMD says it's "unused", whatever that means. We're
8406 * ignoring for now. */
8407 if (IEM_IS_MODRM_REG_MODE(bRm))
8408 {
8409 /* register target */
8410 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8412 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8413 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8414 } IEM_MC_ELSE() {
8415 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8416 } IEM_MC_ENDIF();
8417 IEM_MC_ADVANCE_RIP_AND_FINISH();
8418 IEM_MC_END();
8419 }
8420 else
8421 {
8422 /* memory target */
8423 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8427 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8428 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8429 } IEM_MC_ELSE() {
8430 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8431 } IEM_MC_ENDIF();
8432 IEM_MC_ADVANCE_RIP_AND_FINISH();
8433 IEM_MC_END();
8434 }
8435}
8436
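/*
 * Note: the remaining 0x0f 0x90..0x9f setcc forms repeat this pattern: the
 * ModRM reg field is treated as ignored here (see the encoding @todo above)
 * and the destination byte is set to 1 when the tested condition holds and
 * 0 otherwise.
 */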
8437
8438/**
8439 * @opcode 0x91
8440 * @opfltest of
8441 */
8442FNIEMOP_DEF(iemOp_setno_Eb)
8443{
8444 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8445 IEMOP_HLP_MIN_386();
8446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8447
8448 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8449 * any way. AMD says it's "unused", whatever that means. We're
8450 * ignoring for now. */
8451 if (IEM_IS_MODRM_REG_MODE(bRm))
8452 {
8453 /* register target */
8454 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8456 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8457 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8458 } IEM_MC_ELSE() {
8459 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8460 } IEM_MC_ENDIF();
8461 IEM_MC_ADVANCE_RIP_AND_FINISH();
8462 IEM_MC_END();
8463 }
8464 else
8465 {
8466 /* memory target */
8467 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8471 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8472 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8473 } IEM_MC_ELSE() {
8474 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8475 } IEM_MC_ENDIF();
8476 IEM_MC_ADVANCE_RIP_AND_FINISH();
8477 IEM_MC_END();
8478 }
8479}
8480
8481
8482/**
8483 * @opcode 0x92
8484 * @opfltest cf
8485 */
8486FNIEMOP_DEF(iemOp_setc_Eb)
8487{
8488 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8489 IEMOP_HLP_MIN_386();
8490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8491
8492 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8493 * any way. AMD says it's "unused", whatever that means. We're
8494 * ignoring for now. */
8495 if (IEM_IS_MODRM_REG_MODE(bRm))
8496 {
8497 /* register target */
8498 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8500 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8501 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8502 } IEM_MC_ELSE() {
8503 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8504 } IEM_MC_ENDIF();
8505 IEM_MC_ADVANCE_RIP_AND_FINISH();
8506 IEM_MC_END();
8507 }
8508 else
8509 {
8510 /* memory target */
8511 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8515 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8516 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8517 } IEM_MC_ELSE() {
8518 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8519 } IEM_MC_ENDIF();
8520 IEM_MC_ADVANCE_RIP_AND_FINISH();
8521 IEM_MC_END();
8522 }
8523}
8524
8525
8526/**
8527 * @opcode 0x93
8528 * @opfltest cf
8529 */
8530FNIEMOP_DEF(iemOp_setnc_Eb)
8531{
8532 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8533 IEMOP_HLP_MIN_386();
8534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8535
8536 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8537 * any way. AMD says it's "unused", whatever that means. We're
8538 * ignoring for now. */
8539 if (IEM_IS_MODRM_REG_MODE(bRm))
8540 {
8541 /* register target */
8542 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8544 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8545 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8546 } IEM_MC_ELSE() {
8547 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8548 } IEM_MC_ENDIF();
8549 IEM_MC_ADVANCE_RIP_AND_FINISH();
8550 IEM_MC_END();
8551 }
8552 else
8553 {
8554 /* memory target */
8555 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8559 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8560 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8561 } IEM_MC_ELSE() {
8562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8563 } IEM_MC_ENDIF();
8564 IEM_MC_ADVANCE_RIP_AND_FINISH();
8565 IEM_MC_END();
8566 }
8567}
8568
8569
8570/**
8571 * @opcode 0x94
8572 * @opfltest zf
8573 */
8574FNIEMOP_DEF(iemOp_sete_Eb)
8575{
8576 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8577 IEMOP_HLP_MIN_386();
8578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8579
8580 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8581 * any way. AMD says it's "unused", whatever that means. We're
8582 * ignoring for now. */
8583 if (IEM_IS_MODRM_REG_MODE(bRm))
8584 {
8585 /* register target */
8586 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8588 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8589 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8590 } IEM_MC_ELSE() {
8591 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8592 } IEM_MC_ENDIF();
8593 IEM_MC_ADVANCE_RIP_AND_FINISH();
8594 IEM_MC_END();
8595 }
8596 else
8597 {
8598 /* memory target */
8599 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8603 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8604 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8605 } IEM_MC_ELSE() {
8606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8607 } IEM_MC_ENDIF();
8608 IEM_MC_ADVANCE_RIP_AND_FINISH();
8609 IEM_MC_END();
8610 }
8611}
8612
8613
8614/**
8615 * @opcode 0x95
8616 * @opfltest zf
8617 */
8618FNIEMOP_DEF(iemOp_setne_Eb)
8619{
8620 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8621 IEMOP_HLP_MIN_386();
8622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8623
8624 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8625 * any way. AMD says it's "unused", whatever that means. We're
8626 * ignoring for now. */
8627 if (IEM_IS_MODRM_REG_MODE(bRm))
8628 {
8629 /* register target */
8630 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8632 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8633 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8634 } IEM_MC_ELSE() {
8635 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8636 } IEM_MC_ENDIF();
8637 IEM_MC_ADVANCE_RIP_AND_FINISH();
8638 IEM_MC_END();
8639 }
8640 else
8641 {
8642 /* memory target */
8643 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8649 } IEM_MC_ELSE() {
8650 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8651 } IEM_MC_ENDIF();
8652 IEM_MC_ADVANCE_RIP_AND_FINISH();
8653 IEM_MC_END();
8654 }
8655}
8656
8657
8658/**
8659 * @opcode 0x96
8660 * @opfltest cf,zf
8661 */
8662FNIEMOP_DEF(iemOp_setbe_Eb)
8663{
8664 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8665 IEMOP_HLP_MIN_386();
8666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8667
8668 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8669 * any way. AMD says it's "unused", whatever that means. We're
8670 * ignoring for now. */
8671 if (IEM_IS_MODRM_REG_MODE(bRm))
8672 {
8673 /* register target */
8674 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8676 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8677 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8678 } IEM_MC_ELSE() {
8679 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8680 } IEM_MC_ENDIF();
8681 IEM_MC_ADVANCE_RIP_AND_FINISH();
8682 IEM_MC_END();
8683 }
8684 else
8685 {
8686 /* memory target */
8687 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8691 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8692 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8693 } IEM_MC_ELSE() {
8694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8695 } IEM_MC_ENDIF();
8696 IEM_MC_ADVANCE_RIP_AND_FINISH();
8697 IEM_MC_END();
8698 }
8699}
8700
8701
8702/**
8703 * @opcode 0x97
8704 * @opfltest cf,zf
8705 */
8706FNIEMOP_DEF(iemOp_setnbe_Eb)
8707{
8708 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8709 IEMOP_HLP_MIN_386();
8710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8711
8712 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8713 * any way. AMD says it's "unused", whatever that means. We're
8714 * ignoring for now. */
8715 if (IEM_IS_MODRM_REG_MODE(bRm))
8716 {
8717 /* register target */
8718 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8720 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8721 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8722 } IEM_MC_ELSE() {
8723 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8724 } IEM_MC_ENDIF();
8725 IEM_MC_ADVANCE_RIP_AND_FINISH();
8726 IEM_MC_END();
8727 }
8728 else
8729 {
8730 /* memory target */
8731 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8735 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8736 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8737 } IEM_MC_ELSE() {
8738 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8739 } IEM_MC_ENDIF();
8740 IEM_MC_ADVANCE_RIP_AND_FINISH();
8741 IEM_MC_END();
8742 }
8743}
8744
8745
8746/**
8747 * @opcode 0x98
8748 * @opfltest sf
8749 */
8750FNIEMOP_DEF(iemOp_sets_Eb)
8751{
8752 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8753 IEMOP_HLP_MIN_386();
8754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8755
8756 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8757 * any way. AMD says it's "unused", whatever that means. We're
8758 * ignoring for now. */
8759 if (IEM_IS_MODRM_REG_MODE(bRm))
8760 {
8761 /* register target */
8762 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8764 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8765 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8766 } IEM_MC_ELSE() {
8767 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8768 } IEM_MC_ENDIF();
8769 IEM_MC_ADVANCE_RIP_AND_FINISH();
8770 IEM_MC_END();
8771 }
8772 else
8773 {
8774 /* memory target */
8775 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8779 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8780 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8781 } IEM_MC_ELSE() {
8782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8783 } IEM_MC_ENDIF();
8784 IEM_MC_ADVANCE_RIP_AND_FINISH();
8785 IEM_MC_END();
8786 }
8787}
8788
8789
8790/**
8791 * @opcode 0x99
8792 * @opfltest sf
8793 */
8794FNIEMOP_DEF(iemOp_setns_Eb)
8795{
8796 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8797 IEMOP_HLP_MIN_386();
8798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8799
8800 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8801 * any way. AMD says it's "unused", whatever that means. We're
8802 * ignoring it for now. */
8803 if (IEM_IS_MODRM_REG_MODE(bRm))
8804 {
8805 /* register target */
8806 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8809 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8810 } IEM_MC_ELSE() {
8811 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8812 } IEM_MC_ENDIF();
8813 IEM_MC_ADVANCE_RIP_AND_FINISH();
8814 IEM_MC_END();
8815 }
8816 else
8817 {
8818 /* memory target */
8819 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8824 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8825 } IEM_MC_ELSE() {
8826 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8827 } IEM_MC_ENDIF();
8828 IEM_MC_ADVANCE_RIP_AND_FINISH();
8829 IEM_MC_END();
8830 }
8831}
8832
8833
8834/**
8835 * @opcode 0x9a
8836 * @opfltest pf
8837 */
8838FNIEMOP_DEF(iemOp_setp_Eb)
8839{
8840 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8841 IEMOP_HLP_MIN_386();
8842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8843
8844 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8845 * any way. AMD says it's "unused", whatever that means. We're
8846 * ignoring it for now. */
8847 if (IEM_IS_MODRM_REG_MODE(bRm))
8848 {
8849 /* register target */
8850 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8852 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8853 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8854 } IEM_MC_ELSE() {
8855 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8856 } IEM_MC_ENDIF();
8857 IEM_MC_ADVANCE_RIP_AND_FINISH();
8858 IEM_MC_END();
8859 }
8860 else
8861 {
8862 /* memory target */
8863 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8867 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8868 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8869 } IEM_MC_ELSE() {
8870 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8871 } IEM_MC_ENDIF();
8872 IEM_MC_ADVANCE_RIP_AND_FINISH();
8873 IEM_MC_END();
8874 }
8875}
8876
8877
8878/**
8879 * @opcode 0x9b
8880 * @opfltest pf
8881 */
8882FNIEMOP_DEF(iemOp_setnp_Eb)
8883{
8884 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8885 IEMOP_HLP_MIN_386();
8886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8887
8888 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8889 * any way. AMD says it's "unused", whatever that means. We're
8890 * ignoring it for now. */
8891 if (IEM_IS_MODRM_REG_MODE(bRm))
8892 {
8893 /* register target */
8894 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8896 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8897 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8898 } IEM_MC_ELSE() {
8899 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8900 } IEM_MC_ENDIF();
8901 IEM_MC_ADVANCE_RIP_AND_FINISH();
8902 IEM_MC_END();
8903 }
8904 else
8905 {
8906 /* memory target */
8907 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8911 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8912 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8913 } IEM_MC_ELSE() {
8914 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8915 } IEM_MC_ENDIF();
8916 IEM_MC_ADVANCE_RIP_AND_FINISH();
8917 IEM_MC_END();
8918 }
8919}
8920
8921
8922/**
8923 * @opcode 0x9c
8924 * @opfltest sf,of
8925 */
8926FNIEMOP_DEF(iemOp_setl_Eb)
8927{
8928 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8929 IEMOP_HLP_MIN_386();
8930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8931
8932 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8933 * any way. AMD says it's "unused", whatever that means. We're
8934 * ignoring it for now. */
8935 if (IEM_IS_MODRM_REG_MODE(bRm))
8936 {
8937 /* register target */
8938 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8940 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8941 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8942 } IEM_MC_ELSE() {
8943 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8944 } IEM_MC_ENDIF();
8945 IEM_MC_ADVANCE_RIP_AND_FINISH();
8946 IEM_MC_END();
8947 }
8948 else
8949 {
8950 /* memory target */
8951 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8955 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8956 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8957 } IEM_MC_ELSE() {
8958 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8959 } IEM_MC_ENDIF();
8960 IEM_MC_ADVANCE_RIP_AND_FINISH();
8961 IEM_MC_END();
8962 }
8963}
8964
8965
8966/**
8967 * @opcode 0x9d
8968 * @opfltest sf,of
8969 */
8970FNIEMOP_DEF(iemOp_setnl_Eb)
8971{
8972 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8973 IEMOP_HLP_MIN_386();
8974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8975
8976 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8977 * any way. AMD says it's "unused", whatever that means. We're
8978 * ignoring it for now. */
8979 if (IEM_IS_MODRM_REG_MODE(bRm))
8980 {
8981 /* register target */
8982 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8984 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8985 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8986 } IEM_MC_ELSE() {
8987 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8988 } IEM_MC_ENDIF();
8989 IEM_MC_ADVANCE_RIP_AND_FINISH();
8990 IEM_MC_END();
8991 }
8992 else
8993 {
8994 /* memory target */
8995 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8999 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9000 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9001 } IEM_MC_ELSE() {
9002 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
9003 } IEM_MC_ENDIF();
9004 IEM_MC_ADVANCE_RIP_AND_FINISH();
9005 IEM_MC_END();
9006 }
9007}
9008
9009
9010/**
9011 * @opcode 0x9e
9012 * @opfltest zf,sf,of
9013 */
9014FNIEMOP_DEF(iemOp_setle_Eb)
9015{
9016 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
9017 IEMOP_HLP_MIN_386();
9018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9019
9020 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
9021 * any way. AMD says it's "unused", whatever that means. We're
9022 * ignoring it for now. */
9023 if (IEM_IS_MODRM_REG_MODE(bRm))
9024 {
9025 /* register target */
9026 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
9027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9028 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9029 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
9030 } IEM_MC_ELSE() {
9031 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
9032 } IEM_MC_ENDIF();
9033 IEM_MC_ADVANCE_RIP_AND_FINISH();
9034 IEM_MC_END();
9035 }
9036 else
9037 {
9038 /* memory target */
9039 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
9040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9044 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
9045 } IEM_MC_ELSE() {
9046 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9047 } IEM_MC_ENDIF();
9048 IEM_MC_ADVANCE_RIP_AND_FINISH();
9049 IEM_MC_END();
9050 }
9051}
9052
9053
9054/**
9055 * @opcode 0x9f
9056 * @opfltest zf,sf,of
9057 */
9058FNIEMOP_DEF(iemOp_setnle_Eb)
9059{
9060 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
9061 IEMOP_HLP_MIN_386();
9062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9063
9064 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
9065 * any way. AMD says it's "unused", whatever that means. We're
9066 * ignoring it for now. */
9067 if (IEM_IS_MODRM_REG_MODE(bRm))
9068 {
9069 /* register target */
9070 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
9071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9072 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9073 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
9074 } IEM_MC_ELSE() {
9075 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
9076 } IEM_MC_ENDIF();
9077 IEM_MC_ADVANCE_RIP_AND_FINISH();
9078 IEM_MC_END();
9079 }
9080 else
9081 {
9082 /* memory target */
9083 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
9084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9087 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9088 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9089 } IEM_MC_ELSE() {
9090 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
9091 } IEM_MC_ENDIF();
9092 IEM_MC_ADVANCE_RIP_AND_FINISH();
9093 IEM_MC_END();
9094 }
9095}
9096
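/*
 * All ten SETcc workers above share one template: evaluate a predicate over
 * EFLAGS and store a constant 0 or 1 into the byte register or memory byte
 * selected by ModRM.  A minimal standalone sketch of the predicates behind
 * the IEM_MC_IF_* micro-ops used above (illustration only, hypothetical
 * helper names):
 *
 * @code
 * static bool iemDemoCondBe(uint32_t fEfl) // setbe/setnbe: CF or ZF set
 * {
 *     return (fEfl & (X86_EFL_CF | X86_EFL_ZF)) != 0;
 * }
 *
 * static bool iemDemoCondL(uint32_t fEfl)  // setl/setnl: SF != OF
 * {
 *     return !!(fEfl & X86_EFL_SF) != !!(fEfl & X86_EFL_OF);
 * }
 *
 * static bool iemDemoCondLe(uint32_t fEfl) // setle/setnle: ZF set, or SF != OF
 * {
 *     return (fEfl & X86_EFL_ZF) != 0 || !!(fEfl & X86_EFL_SF) != !!(fEfl & X86_EFL_OF);
 * }
 * @endcode
 */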
9097
9098/** Opcode 0x0f 0xa0. */
9099FNIEMOP_DEF(iemOp_push_fs)
9100{
9101 IEMOP_MNEMONIC(push_fs, "push fs");
9102 IEMOP_HLP_MIN_386();
9103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9104 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
9105}
9106
9107
9108/** Opcode 0x0f 0xa1. */
9109FNIEMOP_DEF(iemOp_pop_fs)
9110{
9111 IEMOP_MNEMONIC(pop_fs, "pop fs");
9112 IEMOP_HLP_MIN_386();
9113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9114 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9115 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9116 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9117 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9118 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9119 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9120 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
9121 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
9122}
9123
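/*
 * A note on the IEM_MC_DEFER_TO_CIMPL_*_RET invocations in this file: the
 * first argument carries the IEM_CIMPL_F_XXX behaviour flags and the second
 * is a bitmask over the kIemNativeGstReg_XXX enum naming every guest
 * register the deferred C implementation may modify - for the pop fs worker
 * above that is RSP plus the four shadow fields of FS.  The native
 * recompiler consumes this for liveness analysis and shadow-register
 * flushing (bugref 10372).  A rough sketch of how such a mask would be
 * queried (illustration only, hypothetical helper name):
 *
 * @code
 * static bool iemDemoCImplMayWriteReg(uint64_t fGstShwFlush, unsigned idxGstReg)
 * {
 *     return (fGstShwFlush & RT_BIT_64(idxGstReg)) != 0;
 * }
 * @endcode
 */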
9124
9125/** Opcode 0x0f 0xa2. */
9126FNIEMOP_DEF(iemOp_cpuid)
9127{
9128 IEMOP_MNEMONIC(cpuid, "cpuid");
9129 IEMOP_HLP_MIN_486(); /* Not on all 486s; CPUID only appeared in later 486 models. */
9130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9131 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
9132 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
9133 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
9134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
9135 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
9136 iemCImpl_cpuid);
9137}
9138
9139
9140/**
9141 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
9142 * iemOp_bts_Ev_Gv.
9143 */
9144
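/*
 * Note on the memory forms in the bodies below: the bit offset in the source
 * register is signed and may select a bit outside the operand addressed by
 * the ModRM encoding.  The macros therefore split it into a signed
 * operand-sized displacement (arithmetic shift right by 4/5/6, then scaled
 * to 2/4/8 bytes) and an intra-operand bit number (the low 4/5/6 bits).
 * A minimal standalone sketch of the 16-bit arithmetic (illustration only,
 * hypothetical helper name); 'bt word [mem], ax' with ax=0xffff (-1) yields
 * bit 15 of the word at mem-2:
 *
 * @code
 * static RTGCPTR iemDemoBtCalcEffAddr16(RTGCPTR GCPtrEff, int16_t i16BitOffset, uint16_t *puBitNo)
 * {
 *     *puBitNo = (uint16_t)i16BitOffset & 0x0f;     // bit within the addressed word (AND 0x0f)
 *     int16_t const i16AddrAdj = i16BitOffset >> 4; // signed word index (SAR by 4)
 *     return GCPtrEff + (int32_t)i16AddrAdj * 2;    // scale words to bytes (SHL by 1)
 * }
 * @endcode
 */
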
9145#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9147 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9148 \
9149 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9150 { \
9151 /* register destination. */ \
9152 switch (pVCpu->iem.s.enmEffOpSize) \
9153 { \
9154 case IEMMODE_16BIT: \
9155 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9157 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9158 IEM_MC_ARG(uint16_t, u16Src, 1); \
9159 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9160 \
9161 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9162 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9163 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9164 IEM_MC_REF_EFLAGS(pEFlags); \
9165 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9166 \
9167 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9168 IEM_MC_END(); \
9169 break; \
9170 \
9171 case IEMMODE_32BIT: \
9172 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9174 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9175 IEM_MC_ARG(uint32_t, u32Src, 1); \
9176 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9177 \
9178 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9179 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9180 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9181 IEM_MC_REF_EFLAGS(pEFlags); \
9182 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9183 \
9184 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9185 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9186 IEM_MC_END(); \
9187 break; \
9188 \
9189 case IEMMODE_64BIT: \
9190 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9192 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9193 IEM_MC_ARG(uint64_t, u64Src, 1); \
9194 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9195 \
9196 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9197 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9198 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9199 IEM_MC_REF_EFLAGS(pEFlags); \
9200 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9201 \
9202 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9203 IEM_MC_END(); \
9204 break; \
9205 \
9206 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9207 } \
9208 } \
9209 else \
9210 { \
9211 /* memory destination. */ \
9212 /** @todo test negative bit offsets! */ \
9213 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
9214 { \
9215 switch (pVCpu->iem.s.enmEffOpSize) \
9216 { \
9217 case IEMMODE_16BIT: \
9218 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9221 IEMOP_HLP_DONE_DECODING(); \
9222 \
9223 IEM_MC_ARG(uint16_t, u16Src, 1); \
9224 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9225 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9226 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9227 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9228 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9229 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9230 \
9231 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9232 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9233 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9234 \
9235 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9236 IEM_MC_FETCH_EFLAGS(EFlags); \
9237 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9238 \
9239 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9240 IEM_MC_COMMIT_EFLAGS(EFlags); \
9241 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9242 IEM_MC_END(); \
9243 break; \
9244 \
9245 case IEMMODE_32BIT: \
9246 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9249 IEMOP_HLP_DONE_DECODING(); \
9250 \
9251 IEM_MC_ARG(uint32_t, u32Src, 1); \
9252 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9253 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9254 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9255 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9256 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9257 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9258 \
9259 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9260 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9261 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9262 \
9263 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9264 IEM_MC_FETCH_EFLAGS(EFlags); \
9265 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9266 \
9267 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9268 IEM_MC_COMMIT_EFLAGS(EFlags); \
9269 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9270 IEM_MC_END(); \
9271 break; \
9272 \
9273 case IEMMODE_64BIT: \
9274 IEM_MC_BEGIN(3, 5, IEM_MC_F_64BIT, 0); \
9275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9277 IEMOP_HLP_DONE_DECODING(); \
9278 \
9279 IEM_MC_ARG(uint64_t, u64Src, 1); \
9280 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9281 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9282 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9283 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9284 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9285 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9286 \
9287 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9288 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9289 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9290 \
9291 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9292 IEM_MC_FETCH_EFLAGS(EFlags); \
9293 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9294 \
9295 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9296 IEM_MC_COMMIT_EFLAGS(EFlags); \
9297 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9298 IEM_MC_END(); \
9299 break; \
9300 \
9301 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9302 } \
9303 } \
9304 else \
9305 { \
9306 (void)0
9307/* Separate macro to work around a parsing issue in IEMAllInstPython.py. */
9308#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9309 switch (pVCpu->iem.s.enmEffOpSize) \
9310 { \
9311 case IEMMODE_16BIT: \
9312 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9315 IEMOP_HLP_DONE_DECODING(); \
9316 \
9317 IEM_MC_ARG(uint16_t, u16Src, 1); \
9318 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9319 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9320 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9321 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9322 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9323 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9324 \
9325 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9326 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9327 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9328 \
9329 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9330 IEM_MC_FETCH_EFLAGS(EFlags); \
9331 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9332 \
9333 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9334 IEM_MC_COMMIT_EFLAGS(EFlags); \
9335 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9336 IEM_MC_END(); \
9337 break; \
9338 \
9339 case IEMMODE_32BIT: \
9340 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9343 IEMOP_HLP_DONE_DECODING(); \
9344 \
9345 IEM_MC_ARG(uint32_t, u32Src, 1); \
9346 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9347 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9348 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9349 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9350 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9351 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9352 \
9353 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9354 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9355 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9356 \
9357 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9358 IEM_MC_FETCH_EFLAGS(EFlags); \
9359 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9360 \
9361 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9362 IEM_MC_COMMIT_EFLAGS(EFlags); \
9363 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9364 IEM_MC_END(); \
9365 break; \
9366 \
9367 case IEMMODE_64BIT: \
9368 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9371 IEMOP_HLP_DONE_DECODING(); \
9372 \
9373 IEM_MC_ARG(uint64_t, u64Src, 1); \
9374 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9375 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9376 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9377 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9378 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9379 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9380 \
9381 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9382 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9383 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9384 \
9385 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9386 IEM_MC_FETCH_EFLAGS(EFlags); \
9387 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9388 \
9389 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9390 IEM_MC_COMMIT_EFLAGS(EFlags); \
9391 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9392 IEM_MC_END(); \
9393 break; \
9394 \
9395 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9396 } \
9397 } \
9398 } \
9399 (void)0
9400
9401/* Read-only version (bt). */
9402#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9404 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9405 \
9406 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9407 { \
9408 /* register destination. */ \
9409 switch (pVCpu->iem.s.enmEffOpSize) \
9410 { \
9411 case IEMMODE_16BIT: \
9412 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9414 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9415 IEM_MC_ARG(uint16_t, u16Src, 1); \
9416 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9417 \
9418 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9419 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9420 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9421 IEM_MC_REF_EFLAGS(pEFlags); \
9422 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9423 \
9424 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9425 IEM_MC_END(); \
9426 break; \
9427 \
9428 case IEMMODE_32BIT: \
9429 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9431 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9432 IEM_MC_ARG(uint32_t, u32Src, 1); \
9433 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9434 \
9435 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9436 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9437 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9438 IEM_MC_REF_EFLAGS(pEFlags); \
9439 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9440 \
9441 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9442 IEM_MC_END(); \
9443 break; \
9444 \
9445 case IEMMODE_64BIT: \
9446 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9448 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9449 IEM_MC_ARG(uint64_t, u64Src, 1); \
9450 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9451 \
9452 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9453 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9454 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9455 IEM_MC_REF_EFLAGS(pEFlags); \
9456 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9457 \
9458 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9459 IEM_MC_END(); \
9460 break; \
9461 \
9462 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9463 } \
9464 } \
9465 else \
9466 { \
9467 /* memory destination. */ \
9468 /** @todo test negative bit offsets! */ \
9469 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9470 { \
9471 switch (pVCpu->iem.s.enmEffOpSize) \
9472 { \
9473 case IEMMODE_16BIT: \
9474 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9477 IEMOP_HLP_DONE_DECODING(); \
9478 \
9479 IEM_MC_ARG(uint16_t, u16Src, 1); \
9480 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9481 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9482 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9483 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9484 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9485 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9486 \
9487 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9488 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9489 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9490 \
9491 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9492 IEM_MC_FETCH_EFLAGS(EFlags); \
9493 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9494 \
9495 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9496 IEM_MC_COMMIT_EFLAGS(EFlags); \
9497 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9498 IEM_MC_END(); \
9499 break; \
9500 \
9501 case IEMMODE_32BIT: \
9502 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9505 IEMOP_HLP_DONE_DECODING(); \
9506 \
9507 IEM_MC_ARG(uint32_t, u32Src, 1); \
9508 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9509 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9510 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9511 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9512 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9513 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9514 \
9515 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9516 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9517 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9518 \
9519 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9520 IEM_MC_FETCH_EFLAGS(EFlags); \
9521 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9522 \
9523 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9524 IEM_MC_COMMIT_EFLAGS(EFlags); \
9525 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9526 IEM_MC_END(); \
9527 break; \
9528 \
9529 case IEMMODE_64BIT: \
9530 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9533 IEMOP_HLP_DONE_DECODING(); \
9534 \
9535 IEM_MC_ARG(uint64_t, u64Src, 1); \
9536 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9537 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9538 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9539 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9540 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9541 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9542 \
9543 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9544 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9545 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9546 \
9547 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9548 IEM_MC_FETCH_EFLAGS(EFlags); \
9549 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9550 \
9551 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9552 IEM_MC_COMMIT_EFLAGS(EFlags); \
9553 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9554 IEM_MC_END(); \
9555 break; \
9556 \
9557 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9558 } \
9559 } \
9560 else \
9561 { \
9562 IEMOP_HLP_DONE_DECODING(); \
9563 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9564 } \
9565 } \
9566 (void)0
9567
9568
9569/**
9570 * @opcode 0xa3
9571 * @oppfx n/a
9572 * @opflclass bitmap
9573 */
9574FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9575{
9576 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9577 IEMOP_HLP_MIN_386();
9578 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9579}
9580
9581
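/*
 * SHLD shifts the destination left by the count and fills the vacated low
 * bits from the top of the source register; SHRD is the mirror image.  The
 * memory forms below pass 1 to IEM_MC_CALC_RM_EFF_ADDR, accounting for the
 * one immediate byte that still follows the ModRM bytes (this matters e.g.
 * for RIP-relative addressing, where the displacement is measured from the
 * end of the instruction).  A minimal standalone sketch of the 32-bit
 * results (illustration only, hypothetical helper names; assumes the count
 * was already masked to 1..31 - a count of zero leaves destination and
 * flags untouched):
 *
 * @code
 * static uint32_t iemDemoShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 * {
 *     return (uDst << cShift) | (uSrc >> (32 - cShift)); // low bits from uSrc's top
 * }
 *
 * static uint32_t iemDemoShrd32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 * {
 *     return (uDst >> cShift) | (uSrc << (32 - cShift)); // high bits from uSrc's bottom
 * }
 * @endcode
 */
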
9582/**
9583 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9584 */
9585#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
9586 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9587 \
9588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9589 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9590 \
9591 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9592 { \
9593 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9594 \
9595 switch (pVCpu->iem.s.enmEffOpSize) \
9596 { \
9597 case IEMMODE_16BIT: \
9598 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
9599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9600 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9601 IEM_MC_ARG(uint16_t, u16Src, 1); \
9602 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9603 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9604 \
9605 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9606 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9607 IEM_MC_REF_EFLAGS(pEFlags); \
9608 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9609 \
9610 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9611 IEM_MC_END(); \
9612 break; \
9613 \
9614 case IEMMODE_32BIT: \
9615 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
9616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9617 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9618 IEM_MC_ARG(uint32_t, u32Src, 1); \
9619 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9620 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9621 \
9622 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9623 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9624 IEM_MC_REF_EFLAGS(pEFlags); \
9625 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9626 \
9627 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9628 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9629 IEM_MC_END(); \
9630 break; \
9631 \
9632 case IEMMODE_64BIT: \
9633 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0); \
9634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9635 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9636 IEM_MC_ARG(uint64_t, u64Src, 1); \
9637 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); \
9638 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9639 \
9640 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9641 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9642 IEM_MC_REF_EFLAGS(pEFlags); \
9643 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9644 \
9645 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9646 IEM_MC_END(); \
9647 break; \
9648 \
9649 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9650 } \
9651 } \
9652 else \
9653 { \
9654 switch (pVCpu->iem.s.enmEffOpSize) \
9655 { \
9656 case IEMMODE_16BIT: \
9657 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
9658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9660 \
9661 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9663 \
9664 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9665 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9666 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9667 \
9668 IEM_MC_ARG(uint16_t, u16Src, 1); \
9669 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9670 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9671 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9672 IEM_MC_FETCH_EFLAGS(EFlags); \
9673 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9674 \
9675 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9676 IEM_MC_COMMIT_EFLAGS(EFlags); \
9677 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9678 IEM_MC_END(); \
9679 break; \
9680 \
9681 case IEMMODE_32BIT: \
9682 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
9683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9685 \
9686 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9688 \
9689 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9690 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9691 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9692 \
9693 IEM_MC_ARG(uint32_t, u32Src, 1); \
9694 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9695 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9696 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9697 IEM_MC_FETCH_EFLAGS(EFlags); \
9698 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9699 \
9700 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9701 IEM_MC_COMMIT_EFLAGS(EFlags); \
9702 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9703 IEM_MC_END(); \
9704 break; \
9705 \
9706 case IEMMODE_64BIT: \
9707 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0); \
9708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
9710 \
9711 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
9712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9713 \
9714 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9715 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9716 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9717 \
9718 IEM_MC_ARG(uint64_t, u64Src, 1); \
9719 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9720 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2); \
9721 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9722 IEM_MC_FETCH_EFLAGS(EFlags); \
9723 \
9724 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9725 \
9726 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9727 IEM_MC_COMMIT_EFLAGS(EFlags); \
9728 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9729 IEM_MC_END(); \
9730 break; \
9731 \
9732 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9733 } \
9734 } (void)0
9735
9736
9737/**
9738 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9739 */
9740#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
9741 PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
9742 \
9743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9744 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
9745 \
9746 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9747 { \
9748 switch (pVCpu->iem.s.enmEffOpSize) \
9749 { \
9750 case IEMMODE_16BIT: \
9751 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
9752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9753 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9754 IEM_MC_ARG(uint16_t, u16Src, 1); \
9755 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9756 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9757 \
9758 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9759 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9760 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9761 IEM_MC_REF_EFLAGS(pEFlags); \
9762 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9763 \
9764 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9765 IEM_MC_END(); \
9766 break; \
9767 \
9768 case IEMMODE_32BIT: \
9769 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
9770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9771 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9772 IEM_MC_ARG(uint32_t, u32Src, 1); \
9773 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9774 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9775 \
9776 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9777 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9778 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9779 IEM_MC_REF_EFLAGS(pEFlags); \
9780 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9781 \
9782 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9783 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9784 IEM_MC_END(); \
9785 break; \
9786 \
9787 case IEMMODE_64BIT: \
9788 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0); \
9789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9790 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9791 IEM_MC_ARG(uint64_t, u64Src, 1); \
9792 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9793 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
9794 \
9795 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9796 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9797 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9798 IEM_MC_REF_EFLAGS(pEFlags); \
9799 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9800 \
9801 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9802 IEM_MC_END(); \
9803 break; \
9804 \
9805 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9806 } \
9807 } \
9808 else \
9809 { \
9810 switch (pVCpu->iem.s.enmEffOpSize) \
9811 { \
9812 case IEMMODE_16BIT: \
9813 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
9814 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9815 IEM_MC_ARG(uint16_t, u16Src, 1); \
9816 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9817 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9819 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9820 \
9821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9823 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9824 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9825 IEM_MC_FETCH_EFLAGS(EFlags); \
9826 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9827 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
9828 \
9829 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9830 IEM_MC_COMMIT_EFLAGS(EFlags); \
9831 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9832 IEM_MC_END(); \
9833 break; \
9834 \
9835 case IEMMODE_32BIT: \
9836 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
9837 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9838 IEM_MC_ARG(uint32_t, u32Src, 1); \
9839 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9840 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9842 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9843 \
9844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9846 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9847 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9848 IEM_MC_FETCH_EFLAGS(EFlags); \
9849 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9850 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
9851 \
9852 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9853 IEM_MC_COMMIT_EFLAGS(EFlags); \
9854 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9855 IEM_MC_END(); \
9856 break; \
9857 \
9858 case IEMMODE_64BIT: \
9859 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0); \
9860 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9861 IEM_MC_ARG(uint64_t, u64Src, 1); \
9862 IEM_MC_ARG(uint8_t, cShiftArg, 2); \
9863 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
9864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9865 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9866 \
9867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9869 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9870 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9871 IEM_MC_FETCH_EFLAGS(EFlags); \
9872 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9873 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
9874 \
9875 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9876 IEM_MC_COMMIT_EFLAGS(EFlags); \
9877 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9878 IEM_MC_END(); \
9879 break; \
9880 \
9881 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9882 } \
9883 } (void)0
9884
9885
9886/**
9887 * @opcode 0xa4
9888 * @opflclass shift_count
9889 */
9890FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9891{
9892 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9893 IEMOP_HLP_MIN_386();
9894 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9895}
9896
9897
9898/**
9899 * @opcode 0xa5
9900 * @opflclass shift_count
9901 */
9902FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9903{
9904 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9905 IEMOP_HLP_MIN_386();
9906 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9907}
9908
9909
9910/** Opcode 0x0f 0xa8. */
9911FNIEMOP_DEF(iemOp_push_gs)
9912{
9913 IEMOP_MNEMONIC(push_gs, "push gs");
9914 IEMOP_HLP_MIN_386();
9915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9916 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9917}
9918
9919
9920/** Opcode 0x0f 0xa9. */
9921FNIEMOP_DEF(iemOp_pop_gs)
9922{
9923 IEMOP_MNEMONIC(pop_gs, "pop gs");
9924 IEMOP_HLP_MIN_386();
9925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9926 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9927 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9928 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9929 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9930 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9931 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9932 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9933 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9934}
9935
9936
9937/** Opcode 0x0f 0xaa. */
9938FNIEMOP_DEF(iemOp_rsm)
9939{
9940 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9941 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9943 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9944 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9945 iemCImpl_rsm);
9946}
9947
9948
9949
9950/**
9951 * @opcode 0xab
9952 * @oppfx n/a
9953 * @opflclass bitmap
9954 */
9955FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9956{
9957 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9958 IEMOP_HLP_MIN_386();
9959 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9960 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9961}
9962
9963
9964/**
9965 * @opcode 0xac
9966 * @opflclass shift_count
9967 */
9968FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9969{
9970 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9971 IEMOP_HLP_MIN_386();
9972 IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9973}
9974
9975
9976/**
9977 * @opcode 0xad
9978 * @opflclass shift_count
9979 */
9980FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9981{
9982 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9983 IEMOP_HLP_MIN_386();
9984 IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9985}
9986
9987
9988/** Opcode 0x0f 0xae mem/0. */
9989FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9990{
9991 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9992 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9993 IEMOP_RAISE_INVALID_OPCODE_RET();
9994
9995 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9996 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9999 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
10000 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10001 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
10002 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
10003 IEM_MC_END();
10004}
10005
10006
10007/** Opcode 0x0f 0xae mem/1. */
10008FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
10009{
10010 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
10011 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
10012 IEMOP_RAISE_INVALID_OPCODE_RET();
10013
10014 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
10015 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10018 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10019 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10020 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
10021 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
10022 IEM_MC_END();
10023}
10024
10025
10026/**
10027 * @opmaps grp15
10028 * @opcode !11/2
10029 * @oppfx none
10030 * @opcpuid sse
10031 * @opgroup og_sse_mxcsrsm
10032 * @opxcpttype 5
10033 * @optest op1=0 -> mxcsr=0
10034 * @optest op1=0x2083 -> mxcsr=0x2083
10035 * @optest op1=0xfffffffe -> value.xcpt=0xd
10036 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
10037 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
10038 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
10039 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
10040 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
10041 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
10042 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
10043 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
10044 */
10045FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
10046{
10047 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10048 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
10049 IEMOP_RAISE_INVALID_OPCODE_RET();
10050
10051 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
10052 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10055 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10056 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10057 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
10058 IEM_MC_END();
10059}
10060
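/*
 * LDMXCSR takes a full 32-bit operand, but MXCSR only defines the bits
 * covered by the CPU's MXCSR_MASK (classically 0xffff); attempting to set
 * any reserved bit raises #GP(0), which is exactly what the
 * 'op1=0xfffffffe -> value.xcpt=0xd' test above encodes.  A minimal sketch
 * of that validity check (illustration only, hypothetical helper name):
 *
 * @code
 * static bool iemDemoLdmxcsrIsValid(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
 * {
 *     return (uNewMxCsr & ~fMxCsrMask) == 0; // any reserved bit set -> #GP(0)
 * }
 * @endcode
 */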
10061
10062/**
10063 * @opmaps grp15
10064 * @opcode !11/3
10065 * @oppfx none
10066 * @opcpuid sse
10067 * @opgroup og_sse_mxcsrsm
10068 * @opxcpttype 5
10069 * @optest mxcsr=0 -> op1=0
10070 * @optest mxcsr=0x2083 -> op1=0x2083
10071 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
10072 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
10073 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
10074 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
10075 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
10076 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
10077 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
10078 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
10079 */
10080FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
10081{
10082 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10083 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
10084 IEMOP_RAISE_INVALID_OPCODE_RET();
10085
10086 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
10087 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10090 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10091 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10092 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
10093 IEM_MC_END();
10094}
10095
10096
10097/**
10098 * @opmaps grp15
10099 * @opcode !11/4
10100 * @oppfx none
10101 * @opcpuid xsave
10102 * @opgroup og_system
10103 * @opxcpttype none
10104 */
10105FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
10106{
10107 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
10108 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
10109 IEMOP_RAISE_INVALID_OPCODE_RET();
10110
10111 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
10112 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10115 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
10116 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10117 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
10118 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
10119 IEM_MC_END();
10120}
10121
10122
10123/**
10124 * @opmaps grp15
10125 * @opcode !11/5
10126 * @oppfx none
10127 * @opcpuid xsave
10128 * @opgroup og_system
10129 * @opxcpttype none
10130 */
10131FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
10132{
10133 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
10134 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
10135 IEMOP_RAISE_INVALID_OPCODE_RET();
10136
10137 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
10138 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10141 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
10142 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10143 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
10144 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
10145 IEM_MC_END();
10146}
10147
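/*
 * XSAVE and XRSTOR take an implicit EDX:EAX operand which is ANDed with
 * XCR0 to form the requested-feature bitmap (RFBM); only state components
 * whose RFBM bit is set are saved respectively restored.  A minimal sketch
 * (illustration only, hypothetical helper name):
 *
 * @code
 * static uint64_t iemDemoXsaveRfbm(uint32_t uEax, uint32_t uEdx, uint64_t uXcr0)
 * {
 *     uint64_t const fReqComponents = ((uint64_t)uEdx << 32) | uEax;
 *     return fReqComponents & uXcr0; // RFBM: only components enabled in XCR0
 * }
 * @endcode
 */
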
10148/** Opcode 0x0f 0xae mem/6. */
10149FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
10150
10151/**
10152 * @opmaps grp15
10153 * @opcode !11/7
10154 * @oppfx none
10155 * @opcpuid clfsh
10156 * @opgroup og_cachectl
10157 * @optest op1=1 ->
10158 */
10159FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
10160{
10161 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10162 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
10163 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
10164
10165 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10166 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10169 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10170 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
10171 IEM_MC_END();
10172}
10173
10174/**
10175 * @opmaps grp15
10176 * @opcode !11/7
10177 * @oppfx 0x66
10178 * @opcpuid clflushopt
10179 * @opgroup og_cachectl
10180 * @optest op1=1 ->
10181 */
10182FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
10183{
10184 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10185 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
10186 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
10187
10188 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10189 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10192 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10193 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
10194 IEM_MC_END();
10195}
10196
10197
10198/** Opcode 0x0f 0xae 11b/5. */
10199FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
10200{
10201 RT_NOREF_PV(bRm);
10202 IEMOP_MNEMONIC(lfence, "lfence");
10203 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10205#ifdef RT_ARCH_ARM64
10206 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
10207#else
10208 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10209 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
10210 else
10211 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10212#endif
10213 IEM_MC_ADVANCE_RIP_AND_FINISH();
10214 IEM_MC_END();
10215}
10216
10217
10218/** Opcode 0x0f 0xae 11b/6. */
10219FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
10220{
10221 RT_NOREF_PV(bRm);
10222 IEMOP_MNEMONIC(mfence, "mfence");
10223 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10225#ifdef RT_ARCH_ARM64
10226 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
10227#else
10228 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10229 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
10230 else
10231 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10232#endif
10233 IEM_MC_ADVANCE_RIP_AND_FINISH();
10234 IEM_MC_END();
10235}
10236
10237
10238/** Opcode 0x0f 0xae 11b/7. */
10239FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
10240{
10241 RT_NOREF_PV(bRm);
10242 IEMOP_MNEMONIC(sfence, "sfence");
10243 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10245#ifdef RT_ARCH_ARM64
10246 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10247#else
10248 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10249 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10250 else
10251 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10252#endif
10253 IEM_MC_ADVANCE_RIP_AND_FINISH();
10254 IEM_MC_END();
10255}
10256
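/*
 * The three fence workers above fall back to iemAImpl_alt_mem_fence when
 * the host lacks SSE2 (and with it LFENCE/MFENCE/SFENCE).  The classic
 * substitute for a full fence on such x86 hosts is a LOCKed
 * read-modify-write, which serializes all earlier loads and stores.  A
 * rough sketch of that idea using a compiler builtin (illustration only;
 * the real worker lives in the assembly implementation files):
 *
 * @code
 * static void iemDemoAltMemFence(void)
 * {
 *     int volatile iTmp = 0;
 *     __atomic_fetch_add(&iTmp, 0, __ATOMIC_SEQ_CST); // compiles to a LOCKed RMW on x86
 * }
 * @endcode
 */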
10257
10258/** Opcode 0xf3 0x0f 0xae 11b/0. */
10259FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
10260{
10261 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
10262 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10263 {
10264 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10266 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10267 IEM_MC_LOCAL(uint64_t, u64Dst);
10268 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10269 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10270 IEM_MC_ADVANCE_RIP_AND_FINISH();
10271 IEM_MC_END();
10272 }
10273 else
10274 {
10275 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10277 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10278 IEM_MC_LOCAL(uint32_t, u32Dst);
10279 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10280 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10281 IEM_MC_ADVANCE_RIP_AND_FINISH();
10282 IEM_MC_END();
10283 }
10284}
10285
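/*
 * All four RD/WR{FS,GS}BASE workers share the gating expressed by
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT: outside 64-bit mode, or with
 * CR4.FSGSBASE clear, the instruction raises #UD.  The WR forms
 * additionally raise #GP(0) on a non-canonical base, see the
 * IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 invocations below.  A minimal
 * sketch of the usability predicate (illustration only, hypothetical
 * helper name):
 *
 * @code
 * static bool iemDemoFsGsBaseUsable(bool f64BitMode, uint64_t uCr4)
 * {
 *     return f64BitMode && (uCr4 & X86_CR4_FSGSBASE) != 0; // otherwise #UD
 * }
 * @endcode
 */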
10286
10287/** Opcode 0xf3 0x0f 0xae 11b/1. */
10288FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10289{
10290 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10291 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10292 {
10293 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10295 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10296 IEM_MC_LOCAL(uint64_t, u64Dst);
10297 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10298 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10299 IEM_MC_ADVANCE_RIP_AND_FINISH();
10300 IEM_MC_END();
10301 }
10302 else
10303 {
10304 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10306 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10307 IEM_MC_LOCAL(uint32_t, u32Dst);
10308 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10309 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10310 IEM_MC_ADVANCE_RIP_AND_FINISH();
10311 IEM_MC_END();
10312 }
10313}
10314
10315
10316/** Opcode 0xf3 0x0f 0xae 11b/2. */
10317FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10318{
10319 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10320 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10321 {
10322 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10324 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10325 IEM_MC_LOCAL(uint64_t, u64Dst);
10326 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10327 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10328 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10329 IEM_MC_ADVANCE_RIP_AND_FINISH();
10330 IEM_MC_END();
10331 }
10332 else
10333 {
10334 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10336 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10337 IEM_MC_LOCAL(uint32_t, u32Dst);
10338 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10339 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10340 IEM_MC_ADVANCE_RIP_AND_FINISH();
10341 IEM_MC_END();
10342 }
10343}
10344
10345
10346/** Opcode 0xf3 0x0f 0xae 11b/3. */
10347FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10348{
10349 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10350 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10351 {
10352 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10354 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10355 IEM_MC_LOCAL(uint64_t, u64Dst);
10356 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10357 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10358 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10359 IEM_MC_ADVANCE_RIP_AND_FINISH();
10360 IEM_MC_END();
10361 }
10362 else
10363 {
10364 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10366 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10367 IEM_MC_LOCAL(uint32_t, u32Dst);
10368 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10369 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10370 IEM_MC_ADVANCE_RIP_AND_FINISH();
10371 IEM_MC_END();
10372 }
10373}
10374
10375
10376/**
10377 * Group 15 jump table for register variant.
10378 */
10379IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10380{ /* pfx: none, 066h, 0f3h, 0f2h */
10381 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10382 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10383 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10384 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10385 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10386 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10387 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10388 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10389};
10390AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10391
10392
10393/**
10394 * Group 15 jump table for memory variant.
10395 */
10396IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10397{ /* pfx: none, 066h, 0f3h, 0f2h */
10398 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10399 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10400 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10401 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10402 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10403 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10404 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10405 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10406};
10407AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10408
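/*
 * Note: the two tables above are indexed as (modrm.reg * 4) + prefix column,
 * with the prefix columns ordered: none, 066h, 0f3h, 0f2h (the encoding held
 * by pVCpu->iem.s.idxPrefix). Illustrative sketch of the lookup done below:
 *
 *      pfn = g_apfnGroup15RegReg[((bRm >> 3) & 7) * 4 + idxPrefix];
 */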
10409
10410/** Opcode 0x0f 0xae. */
10411FNIEMOP_DEF(iemOp_Grp15)
10412{
10413 IEMOP_HLP_MIN_586(); /* Not entirely accurate or needed, but useful for debugging 286 code. */
10414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10415 if (IEM_IS_MODRM_REG_MODE(bRm))
10416 /* register, register */
10417 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10418 + pVCpu->iem.s.idxPrefix], bRm);
10419 /* memory, register */
10420 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10421 + pVCpu->iem.s.idxPrefix], bRm);
10422}
10423
10424
10425/**
10426 * @opcode 0xaf
10427 * @opflclass multiply
10428 */
10429FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10430{
10431 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10432 IEMOP_HLP_MIN_386();
10433 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10434 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10435 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_MIN_386);
10436}
10437
10438
10439/**
10440 * @opcode 0xb0
10441 * @opflclass arithmetic
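 *
 * @remarks A minimal sketch of the compare-and-exchange semantics the workers
 *          implement (illustrative only; the arithmetic flags are set as for
 *          a CMP of the two values, and fEFlags is a hypothetical name here):
 * @code
 *      if (uAl == *pu8Dst)
 *      {
 *          fEFlags |= X86_EFL_ZF;      // equal: store the source operand
 *          *pu8Dst  = u8Src;
 *      }
 *      else
 *      {
 *          fEFlags &= ~X86_EFL_ZF;     // not equal: load AL from destination
 *          uAl      = *pu8Dst;
 *      }
 * @endcode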
10442 */
10443FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10444{
10445 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10446 IEMOP_HLP_MIN_486();
10447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10448
10449 if (IEM_IS_MODRM_REG_MODE(bRm))
10450 {
10451 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10453 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10454 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10455 IEM_MC_ARG(uint8_t, u8Src, 2);
10456 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10457
10458 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10459 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10460 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10461 IEM_MC_REF_EFLAGS(pEFlags);
10462 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10463
10464 IEM_MC_ADVANCE_RIP_AND_FINISH();
10465 IEM_MC_END();
10466 }
10467 else
10468 {
10469#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10470 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10473 IEMOP_HLP_DONE_DECODING(); \
10474 \
10475 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10476 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10477 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10478 \
10479 IEM_MC_ARG(uint8_t, u8Src, 2); \
10480 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10481 \
10482 IEM_MC_LOCAL(uint8_t, u8Al); \
10483 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10484 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10485 \
10486 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10487 IEM_MC_FETCH_EFLAGS(EFlags); \
10488 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10489 \
10490 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10491 IEM_MC_COMMIT_EFLAGS(EFlags); \
10492 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10493 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10494 IEM_MC_END()
10495
10496 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10497 {
10498 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10499 }
10500 else
10501 {
10502 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10503 }
10504 }
10505}
10506
10507/**
10508 * @opcode 0xb1
10509 * @opflclass arithmetic
10510 */
10511FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10512{
10513 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10514 IEMOP_HLP_MIN_486();
10515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10516
10517 if (IEM_IS_MODRM_REG_MODE(bRm))
10518 {
10519 switch (pVCpu->iem.s.enmEffOpSize)
10520 {
10521 case IEMMODE_16BIT:
10522 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10524 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10525 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10526 IEM_MC_ARG(uint16_t, u16Src, 2);
10527 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10528
10529 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10530 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10531 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10532 IEM_MC_REF_EFLAGS(pEFlags);
10533 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10534
10535 IEM_MC_ADVANCE_RIP_AND_FINISH();
10536 IEM_MC_END();
10537 break;
10538
10539 case IEMMODE_32BIT:
10540 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10542 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10543 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10544 IEM_MC_ARG(uint32_t, u32Src, 2);
10545 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10546
10547 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10548 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10549 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10550 IEM_MC_REF_EFLAGS(pEFlags);
10551 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10552
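/* Whichever register the instruction wrote (the destination on equal,
   EAX otherwise) gets its upper dword cleared, as for any 32-bit
   register write in 64-bit mode. */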
10553 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10554 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10555 } IEM_MC_ELSE() {
10556 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10557 } IEM_MC_ENDIF();
10558
10559 IEM_MC_ADVANCE_RIP_AND_FINISH();
10560 IEM_MC_END();
10561 break;
10562
10563 case IEMMODE_64BIT:
10564 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
10565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10566 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10567 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10568 IEM_MC_ARG(uint64_t, u64Src, 2);
10569 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10570
10571 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10572 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10573 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10574 IEM_MC_REF_EFLAGS(pEFlags);
10575 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10576
10577 IEM_MC_ADVANCE_RIP_AND_FINISH();
10578 IEM_MC_END();
10579 break;
10580
10581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10582 }
10583 }
10584 else
10585 {
10586#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10587 do { \
10588 switch (pVCpu->iem.s.enmEffOpSize) \
10589 { \
10590 case IEMMODE_16BIT: \
10591 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10592 \
10593 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10596 IEMOP_HLP_DONE_DECODING(); \
10597 \
10598 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10599 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10600 \
10601 IEM_MC_ARG(uint16_t, u16Src, 2); \
10602 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10603 \
10604 IEM_MC_LOCAL(uint16_t, u16Ax); \
10605 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10606 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10607 \
10608 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10609 IEM_MC_FETCH_EFLAGS(EFlags); \
10610 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10611 \
10612 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10613 IEM_MC_COMMIT_EFLAGS(EFlags); \
10614 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10615 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10616 IEM_MC_END(); \
10617 break; \
10618 \
10619 case IEMMODE_32BIT: \
10620 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10623 IEMOP_HLP_DONE_DECODING(); \
10624 \
10625 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10626 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10627 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10628 \
10629 IEM_MC_ARG(uint32_t, u32Src, 2); \
10630 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10631 \
10632 IEM_MC_LOCAL(uint32_t, u32Eax); \
10633 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10634 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10635 \
10636 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10637 IEM_MC_FETCH_EFLAGS(EFlags); \
10638 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10639 \
10640 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10641 IEM_MC_COMMIT_EFLAGS(EFlags); \
10642 \
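/* Only write EAX back when the compare failed (ZF clear); a 32-bit \
   register write would otherwise zero the upper dword of RAX even \
   though EAX was architecturally left untouched (this matches the \
   register variant above). */ \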
10643 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10644 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10645 } IEM_MC_ENDIF(); \
10646 \
10647 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10648 IEM_MC_END(); \
10649 break; \
10650 \
10651 case IEMMODE_64BIT: \
10652 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT, 0); \
10653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10655 IEMOP_HLP_DONE_DECODING(); \
10656 \
10657 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10658 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10659 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10660 \
10661 IEM_MC_ARG(uint64_t, u64Src, 2); \
10662 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10663 \
10664 IEM_MC_LOCAL(uint64_t, u64Rax); \
10665 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10666 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10667 \
10668 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10669 IEM_MC_FETCH_EFLAGS(EFlags); \
10670 \
10671 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10672 \
10673 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10674 IEM_MC_COMMIT_EFLAGS(EFlags); \
10675 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10676 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10677 IEM_MC_END(); \
10678 break; \
10679 \
10680 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10681 } \
10682 } while (0)
10683
10684 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10685 {
10686 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10687 }
10688 else
10689 {
10690 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10691 }
10692 }
10693}
10694
10695
10696/** Opcode 0x0f 0xb2. */
10697FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10698{
10699 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10700 IEMOP_HLP_MIN_386();
10701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10702 if (IEM_IS_MODRM_REG_MODE(bRm))
10703 IEMOP_RAISE_INVALID_OPCODE_RET();
10704 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10705}
10706
10707
10708/**
10709 * @opcode 0xb3
10710 * @oppfx n/a
10711 * @opflclass bitmap
10712 */
10713FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10714{
10715 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10716 IEMOP_HLP_MIN_386();
10717 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10718 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10719}
10720
10721
10722/** Opcode 0x0f 0xb4. */
10723FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10724{
10725 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10726 IEMOP_HLP_MIN_386();
10727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10728 if (IEM_IS_MODRM_REG_MODE(bRm))
10729 IEMOP_RAISE_INVALID_OPCODE_RET();
10730 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10731}
10732
10733
10734/** Opcode 0x0f 0xb5. */
10735FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10736{
10737 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10738 IEMOP_HLP_MIN_386();
10739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10740 if (IEM_IS_MODRM_REG_MODE(bRm))
10741 IEMOP_RAISE_INVALID_OPCODE_RET();
10742 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10743}
10744
10745
10746/** Opcode 0x0f 0xb6. */
10747FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10748{
10749 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10750 IEMOP_HLP_MIN_386();
10751
10752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10753
10754 /*
10755 * If rm is denoting a register, no more instruction bytes.
10756 */
10757 if (IEM_IS_MODRM_REG_MODE(bRm))
10758 {
10759 switch (pVCpu->iem.s.enmEffOpSize)
10760 {
10761 case IEMMODE_16BIT:
10762 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10764 IEM_MC_LOCAL(uint16_t, u16Value);
10765 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10766 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10767 IEM_MC_ADVANCE_RIP_AND_FINISH();
10768 IEM_MC_END();
10769 break;
10770
10771 case IEMMODE_32BIT:
10772 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10774 IEM_MC_LOCAL(uint32_t, u32Value);
10775 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10776 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10777 IEM_MC_ADVANCE_RIP_AND_FINISH();
10778 IEM_MC_END();
10779 break;
10780
10781 case IEMMODE_64BIT:
10782 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10784 IEM_MC_LOCAL(uint64_t, u64Value);
10785 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10786 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10787 IEM_MC_ADVANCE_RIP_AND_FINISH();
10788 IEM_MC_END();
10789 break;
10790
10791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10792 }
10793 }
10794 else
10795 {
10796 /*
10797 * We're loading a register from memory.
10798 */
10799 switch (pVCpu->iem.s.enmEffOpSize)
10800 {
10801 case IEMMODE_16BIT:
10802 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10803 IEM_MC_LOCAL(uint16_t, u16Value);
10804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10807 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10808 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10809 IEM_MC_ADVANCE_RIP_AND_FINISH();
10810 IEM_MC_END();
10811 break;
10812
10813 case IEMMODE_32BIT:
10814 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10815 IEM_MC_LOCAL(uint32_t, u32Value);
10816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10819 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10820 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10821 IEM_MC_ADVANCE_RIP_AND_FINISH();
10822 IEM_MC_END();
10823 break;
10824
10825 case IEMMODE_64BIT:
10826 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10827 IEM_MC_LOCAL(uint64_t, u64Value);
10828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10831 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10832 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10833 IEM_MC_ADVANCE_RIP_AND_FINISH();
10834 IEM_MC_END();
10835 break;
10836
10837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10838 }
10839 }
10840}
10841
10842
10843/** Opcode 0x0f 0xb7. */
10844FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10845{
10846 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10847 IEMOP_HLP_MIN_386();
10848
10849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10850
10851 /** @todo Not entirely sure how the operand size prefix is handled here,
10852 * assuming that it will be ignored. Would be nice to have a few
10853 * tests for this. */
10854
10855 /** @todo There should be no difference in the behaviour whether REX.W is
10856 * present or not... */
10857
10858 /*
10859 * If rm is denoting a register, no more instruction bytes.
10860 */
10861 if (IEM_IS_MODRM_REG_MODE(bRm))
10862 {
10863 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10864 {
10865 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10867 IEM_MC_LOCAL(uint32_t, u32Value);
10868 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10869 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10870 IEM_MC_ADVANCE_RIP_AND_FINISH();
10871 IEM_MC_END();
10872 }
10873 else
10874 {
10875 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10877 IEM_MC_LOCAL(uint64_t, u64Value);
10878 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10879 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10880 IEM_MC_ADVANCE_RIP_AND_FINISH();
10881 IEM_MC_END();
10882 }
10883 }
10884 else
10885 {
10886 /*
10887 * We're loading a register from memory.
10888 */
10889 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10890 {
10891 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10892 IEM_MC_LOCAL(uint32_t, u32Value);
10893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10896 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10897 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10898 IEM_MC_ADVANCE_RIP_AND_FINISH();
10899 IEM_MC_END();
10900 }
10901 else
10902 {
10903 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10904 IEM_MC_LOCAL(uint64_t, u64Value);
10905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10908 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10909 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10910 IEM_MC_ADVANCE_RIP_AND_FINISH();
10911 IEM_MC_END();
10912 }
10913 }
10914}
10915
10916
10917/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10918FNIEMOP_UD_STUB(iemOp_jmpe);
10919
10920
10921/**
10922 * @opcode 0xb8
10923 * @oppfx 0xf3
10924 * @opflmodify cf,pf,af,zf,sf,of
10925 * @opflclear cf,pf,af,sf,of
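 * @remarks ZF is the only flag that varies: it is set when the source (and
 *          thus the count) is zero. Illustrative example:
 * @code
 *      popcnt eax, ebx     ; ebx=00000005h -> eax=2, ZF=0
 *      popcnt eax, ecx     ; ecx=0         -> eax=0, ZF=1
 * @endcode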
10926 */
10927FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10928{
10929 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10931 return iemOp_InvalidNeedRM(pVCpu);
10932#ifndef TST_IEM_CHECK_MC
10933# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10934 static const IEMOPBINSIZES s_Native =
10935 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10936# endif
10937 static const IEMOPBINSIZES s_Fallback =
10938 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10939#endif
10940 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10941 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
10942}
10943
10944
10945/**
10946 * @opcode 0xb9
10947 * @opinvalid intel-modrm
10948 * @optest ->
10949 */
10950FNIEMOP_DEF(iemOp_Grp10)
10951{
10952 /*
10953 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the ModR/M
10954 * byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10955 */
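/*
 * Illustrative example (hypothetical byte sequence): given
 * 0F B9 04 25 00 00 00 00, Intel consumes the ModR/M byte (04h), the SIB
 * byte (25h) and the disp32 before raising #UD, whereas AMD raises #UD
 * right after the 0F B9 opcode bytes.
 */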
10956 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10957 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10958 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10959}
10960
10961
10962/**
10963 * Body for the group 8 bit instructions.
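 *
 * @remarks Note how the immediate bit offset is masked to the operand width
 *          below (bImm & 0x0f / 0x1f / 0x3f for 16/32/64-bit operands); only
 *          the BT/BTS/BTR/BTC forms taking a register bit offset can address
 *          bits outside the addressed operand.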
10964 */
10965#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10966 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10967 \
10968 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10969 { \
10970 /* register destination. */ \
10971 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10972 \
10973 switch (pVCpu->iem.s.enmEffOpSize) \
10974 { \
10975 case IEMMODE_16BIT: \
10976 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10978 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10979 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10980 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10981 \
10982 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10983 IEM_MC_REF_EFLAGS(pEFlags); \
10984 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10985 \
10986 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10987 IEM_MC_END(); \
10988 break; \
10989 \
10990 case IEMMODE_32BIT: \
10991 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10993 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10994 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10995 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10996 \
10997 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10998 IEM_MC_REF_EFLAGS(pEFlags); \
10999 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11000 \
11001 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
11002 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11003 IEM_MC_END(); \
11004 break; \
11005 \
11006 case IEMMODE_64BIT: \
11007 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
11008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11009 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11010 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11011 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11012 \
11013 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11014 IEM_MC_REF_EFLAGS(pEFlags); \
11015 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11016 \
11017 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11018 IEM_MC_END(); \
11019 break; \
11020 \
11021 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11022 } \
11023 } \
11024 else \
11025 { \
11026 /* memory destination. */ \
11027 /** @todo test negative bit offsets! */ \
11028 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
11029 { \
11030 switch (pVCpu->iem.s.enmEffOpSize) \
11031 { \
11032 case IEMMODE_16BIT: \
11033 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11036 \
11037 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11038 IEMOP_HLP_DONE_DECODING(); \
11039 \
11040 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11041 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11042 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11043 \
11044 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11045 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11046 IEM_MC_FETCH_EFLAGS(EFlags); \
11047 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11048 \
11049 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
11050 IEM_MC_COMMIT_EFLAGS(EFlags); \
11051 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11052 IEM_MC_END(); \
11053 break; \
11054 \
11055 case IEMMODE_32BIT: \
11056 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11059 \
11060 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11061 IEMOP_HLP_DONE_DECODING(); \
11062 \
11063 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11064 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11065 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11066 \
11067 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11068 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11069 IEM_MC_FETCH_EFLAGS(EFlags); \
11070 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11071 \
11072 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
11073 IEM_MC_COMMIT_EFLAGS(EFlags); \
11074 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11075 IEM_MC_END(); \
11076 break; \
11077 \
11078 case IEMMODE_64BIT: \
11079 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11082 \
11083 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11084 IEMOP_HLP_DONE_DECODING(); \
11085 \
11086 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11087 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11088 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11089 \
11090 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11091 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11092 IEM_MC_FETCH_EFLAGS(EFlags); \
11093 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11094 \
11095 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
11096 IEM_MC_COMMIT_EFLAGS(EFlags); \
11097 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11098 IEM_MC_END(); \
11099 break; \
11100 \
11101 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11102 } \
11103 } \
11104 else \
11105 { \
11106 (void)0
11107/* Separate macro to work around a parsing issue in IEMAllInstPython.py */
11108#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
11109 switch (pVCpu->iem.s.enmEffOpSize) \
11110 { \
11111 case IEMMODE_16BIT: \
11112 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11115 \
11116 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11117 IEMOP_HLP_DONE_DECODING(); \
11118 \
11119 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11120 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11121 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11122 \
11123 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11124 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11125 IEM_MC_FETCH_EFLAGS(EFlags); \
11126 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
11127 \
11128 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
11129 IEM_MC_COMMIT_EFLAGS(EFlags); \
11130 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11131 IEM_MC_END(); \
11132 break; \
11133 \
11134 case IEMMODE_32BIT: \
11135 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11138 \
11139 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11140 IEMOP_HLP_DONE_DECODING(); \
11141 \
11142 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11143 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11144 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11145 \
11146 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11148 IEM_MC_FETCH_EFLAGS(EFlags); \
11149 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
11150 \
11151 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
11152 IEM_MC_COMMIT_EFLAGS(EFlags); \
11153 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11154 IEM_MC_END(); \
11155 break; \
11156 \
11157 case IEMMODE_64BIT: \
11158 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11161 \
11162 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11163 IEMOP_HLP_DONE_DECODING(); \
11164 \
11165 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11166 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11167 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11168 \
11169 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11170 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11171 IEM_MC_FETCH_EFLAGS(EFlags); \
11172 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
11173 \
11174 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
11175 IEM_MC_COMMIT_EFLAGS(EFlags); \
11176 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11177 IEM_MC_END(); \
11178 break; \
11179 \
11180 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11181 } \
11182 } \
11183 } \
11184 (void)0
11185
11186/* Read-only version (bt) */
11187#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
11188 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
11189 \
11190 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11191 { \
11192 /* register destination. */ \
11193 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11194 \
11195 switch (pVCpu->iem.s.enmEffOpSize) \
11196 { \
11197 case IEMMODE_16BIT: \
11198 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11200 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11201 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11202 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11203 \
11204 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11205 IEM_MC_REF_EFLAGS(pEFlags); \
11206 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11207 \
11208 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11209 IEM_MC_END(); \
11210 break; \
11211 \
11212 case IEMMODE_32BIT: \
11213 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11215 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11216 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11217 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11218 \
11219 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11220 IEM_MC_REF_EFLAGS(pEFlags); \
11221 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11222 \
11223 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11224 IEM_MC_END(); \
11225 break; \
11226 \
11227 case IEMMODE_64BIT: \
11228 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
11229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11230 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11231 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11232 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11233 \
11234 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11235 IEM_MC_REF_EFLAGS(pEFlags); \
11236 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11237 \
11238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11239 IEM_MC_END(); \
11240 break; \
11241 \
11242 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11243 } \
11244 } \
11245 else \
11246 { \
11247 /* memory destination. */ \
11248 /** @todo test negative bit offsets! */ \
11249 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
11250 { \
11251 switch (pVCpu->iem.s.enmEffOpSize) \
11252 { \
11253 case IEMMODE_16BIT: \
11254 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11257 \
11258 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11259 IEMOP_HLP_DONE_DECODING(); \
11260 \
11261 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11262 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11263 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11264 \
11265 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11266 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11267 IEM_MC_FETCH_EFLAGS(EFlags); \
11268 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11269 \
11270 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11271 IEM_MC_COMMIT_EFLAGS(EFlags); \
11272 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11273 IEM_MC_END(); \
11274 break; \
11275 \
11276 case IEMMODE_32BIT: \
11277 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11280 \
11281 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11282 IEMOP_HLP_DONE_DECODING(); \
11283 \
11284 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11285 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11286 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11287 \
11288 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11289 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11290 IEM_MC_FETCH_EFLAGS(EFlags); \
11291 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11292 \
11293 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11294 IEM_MC_COMMIT_EFLAGS(EFlags); \
11295 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11296 IEM_MC_END(); \
11297 break; \
11298 \
11299 case IEMMODE_64BIT: \
11300 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11303 \
11304 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11305 IEMOP_HLP_DONE_DECODING(); \
11306 \
11307 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11308 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11309 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11310 \
11311 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11312 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11313 IEM_MC_FETCH_EFLAGS(EFlags); \
11314 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11315 \
11316 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11317 IEM_MC_COMMIT_EFLAGS(EFlags); \
11318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11319 IEM_MC_END(); \
11320 break; \
11321 \
11322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11323 } \
11324 } \
11325 else \
11326 { \
11327 IEMOP_HLP_DONE_DECODING(); \
11328 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11329 } \
11330 } \
11331 (void)0
11332
11333
11334/**
11335 * @opmaps grp8
11336 * @opcode /4
11337 * @oppfx n/a
11338 * @opflclass bitmap
11339 */
11340FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11341{
11342 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11343 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11344}
11345
11346
11347/**
11348 * @opmaps grp8
11349 * @opcode /5
11350 * @oppfx n/a
11351 * @opflclass bitmap
11352 */
11353FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11354{
11355 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11356 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11357 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11358}
11359
11360
11361/**
11362 * @opmaps grp8
11363 * @opcode /6
11364 * @oppfx n/a
11365 * @opflclass bitmap
11366 */
11367FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11368{
11369 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11370 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11371 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11372}
11373
11374
11375/**
11376 * @opmaps grp8
11377 * @opcode /7
11378 * @oppfx n/a
11379 * @opflclass bitmap
11380 */
11381FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11382{
11383 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11384 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11385 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11386}
11387
11388
11389/** Opcode 0x0f 0xba. */
11390FNIEMOP_DEF(iemOp_Grp8)
11391{
11392 IEMOP_HLP_MIN_386();
11393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11394 switch (IEM_GET_MODRM_REG_8(bRm))
11395 {
11396 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11397 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11398 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11399 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11400
11401 case 0: case 1: case 2: case 3:
11402 /* Both AMD and Intel want full modr/m decoding and imm8. */
11403 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11404
11405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11406 }
11407}
11408
11409
11410/**
11411 * @opcode 0xbb
11412 * @oppfx n/a
11413 * @opflclass bitmap
11414 */
11415FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11416{
11417 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11418 IEMOP_HLP_MIN_386();
11419 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11420 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11421}
11422
11423
11424/**
11425 * Common worker for BSF and BSR instructions.
11426 *
11427 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11428 * the destination register, which means that for 32-bit operations the high
11429 * bits must be left alone.
11430 *
11431 * @param pImpl Pointer to the instruction implementation (assembly).
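 *
 * @remarks Illustrative corner case for the 32-bit operand size: on a zero
 *          source the worker sets ZF and leaves the destination untouched,
 *          so the usual zeroing of the upper destination dword is skipped:
 * @code
 *      bsf eax, ebx        ; ebx == 0: ZF=1, rax left entirely unchanged
 *      bsf eax, ecx        ; ecx != 0: ZF=0, eax = bit index, upper rax = 0
 * @endcode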
11432 */
11433FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11434{
11435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11436
11437 /*
11438 * If rm is denoting a register, no more instruction bytes.
11439 */
11440 if (IEM_IS_MODRM_REG_MODE(bRm))
11441 {
11442 switch (pVCpu->iem.s.enmEffOpSize)
11443 {
11444 case IEMMODE_16BIT:
11445 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11447 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11448 IEM_MC_ARG(uint16_t, u16Src, 1);
11449 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11450
11451 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11452 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11453 IEM_MC_REF_EFLAGS(pEFlags);
11454 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11455
11456 IEM_MC_ADVANCE_RIP_AND_FINISH();
11457 IEM_MC_END();
11458 break;
11459
11460 case IEMMODE_32BIT:
11461 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11463 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11464 IEM_MC_ARG(uint32_t, u32Src, 1);
11465 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11466
11467 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11468 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11469 IEM_MC_REF_EFLAGS(pEFlags);
11470 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11471 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11472 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11473 } IEM_MC_ENDIF();
11474 IEM_MC_ADVANCE_RIP_AND_FINISH();
11475 IEM_MC_END();
11476 break;
11477
11478 case IEMMODE_64BIT:
11479 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11481 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11482 IEM_MC_ARG(uint64_t, u64Src, 1);
11483 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11484
11485 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11486 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11487 IEM_MC_REF_EFLAGS(pEFlags);
11488 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11489
11490 IEM_MC_ADVANCE_RIP_AND_FINISH();
11491 IEM_MC_END();
11492 break;
11493
11494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11495 }
11496 }
11497 else
11498 {
11499 /*
11500 * We're accessing memory.
11501 */
11502 switch (pVCpu->iem.s.enmEffOpSize)
11503 {
11504 case IEMMODE_16BIT:
11505 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11506 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11507 IEM_MC_ARG(uint16_t, u16Src, 1);
11508 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11510
11511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11513 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11514 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11515 IEM_MC_REF_EFLAGS(pEFlags);
11516 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11517
11518 IEM_MC_ADVANCE_RIP_AND_FINISH();
11519 IEM_MC_END();
11520 break;
11521
11522 case IEMMODE_32BIT:
11523 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11524 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11525 IEM_MC_ARG(uint32_t, u32Src, 1);
11526 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11528
11529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11531 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11532 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11533 IEM_MC_REF_EFLAGS(pEFlags);
11534 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11535
11536 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11537 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11538 } IEM_MC_ENDIF();
11539 IEM_MC_ADVANCE_RIP_AND_FINISH();
11540 IEM_MC_END();
11541 break;
11542
11543 case IEMMODE_64BIT:
11544 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
11545 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11546 IEM_MC_ARG(uint64_t, u64Src, 1);
11547 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11549
11550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11552 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11553 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11554 IEM_MC_REF_EFLAGS(pEFlags);
11555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11556
11557 IEM_MC_ADVANCE_RIP_AND_FINISH();
11558 IEM_MC_END();
11559 break;
11560
11561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11562 }
11563 }
11564}
11565
11566
11567/**
11568 * @opcode 0xbc
11569 * @oppfx !0xf3
11570 * @opfltest cf,pf,af,sf,of
11571 * @opflmodify cf,pf,af,zf,sf,of
11572 * @opflundef cf,pf,af,sf,of
11573 * @todo AMD doesn't modify cf,pf,af,sf&of but since Intel does, we're forced to
11574 * document them as inputs. Sigh.
11575 */
11576FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11577{
11578 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11579 IEMOP_HLP_MIN_386();
11580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11581 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11582}
11583
11584
11585/**
11586 * @opcode 0xbc
11587 * @oppfx 0xf3
11588 * @opfltest pf,af,sf,of
11589 * @opflmodify cf,pf,af,zf,sf,of
11590 * @opflundef pf,af,sf,of
11591 * @todo AMD doesn't modify pf,af,sf&of but since Intel does, we're forced to
11592 * document them as inputs. Sigh.
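 * @remarks Unlike BSF, TZCNT is well defined for a zero source: the result is
 *          the operand width in bits and CF is set, while ZF reflects a zero
 *          result rather than a zero source. Illustrative example:
 * @code
 *      tzcnt eax, ebx      ; ebx=0 -> eax=32, CF=1, ZF=0
 *      tzcnt eax, ecx      ; ecx=1 -> eax=0,  CF=0, ZF=1
 * @endcode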
11593 */
11594FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11595{
11596 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11597 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11598 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11599
11600#ifndef TST_IEM_CHECK_MC
11601 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11602 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11603 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11604 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11605 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11606 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11607 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11608 {
11609 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11610 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11611 };
11612#endif
11613 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11614 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11615 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11616 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11617}
11618
11619
11620/**
11621 * @opcode 0xbd
11622 * @oppfx !0xf3
11623 * @opfltest cf,pf,af,sf,of
11624 * @opflmodify cf,pf,af,zf,sf,of
11625 * @opflundef cf,pf,af,sf,of
11626 * @todo AMD doesn't modify cf,pf,af,sf&of but since Intel does, we're forced to
11627 * document them as inputs. Sigh.
11628 */
11629FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11630{
11631 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11632 IEMOP_HLP_MIN_386();
11633 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11634 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11635}
11636
11637
11638/**
11639 * @opcode 0xbd
11640 * @oppfx 0xf3
11641 * @opfltest pf,af,sf,of
11642 * @opflmodify cf,pf,af,zf,sf,of
11643 * @opflundef pf,af,sf,of
11644 * @todo AMD doesn't modify pf,af,sf&of but since Intel does, we're forced to
11645 * document them as inputs. Sigh.
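 * @remarks Like TZCNT, LZCNT is well defined for a zero source: the result is
 *          the operand width in bits and CF is set, while ZF is set only for
 *          a zero result. Illustrative example:
 * @code
 *      lzcnt eax, ebx      ; ebx=0         -> eax=32, CF=1, ZF=0
 *      lzcnt eax, ecx      ; ecx=80000000h -> eax=0,  CF=0, ZF=1
 * @endcode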
11646 */
11647FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11648{
11649 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11650 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11651 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11652
11653#ifndef TST_IEM_CHECK_MC
11654 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11655 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11656 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11657 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11658 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11659 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11660 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11661 {
11662 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11663 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11664 };
11665#endif
11666 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11667 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11668 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11669 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11670}
11671
11672
11673
11674/** Opcode 0x0f 0xbe. */
11675FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11676{
11677 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11678 IEMOP_HLP_MIN_386();
11679
11680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11681
11682 /*
11683 * If rm is denoting a register, no more instruction bytes.
11684 */
11685 if (IEM_IS_MODRM_REG_MODE(bRm))
11686 {
11687 switch (pVCpu->iem.s.enmEffOpSize)
11688 {
11689 case IEMMODE_16BIT:
11690 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11692 IEM_MC_LOCAL(uint16_t, u16Value);
11693 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11694 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11695 IEM_MC_ADVANCE_RIP_AND_FINISH();
11696 IEM_MC_END();
11697 break;
11698
11699 case IEMMODE_32BIT:
11700 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11702 IEM_MC_LOCAL(uint32_t, u32Value);
11703 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11704 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11705 IEM_MC_ADVANCE_RIP_AND_FINISH();
11706 IEM_MC_END();
11707 break;
11708
11709 case IEMMODE_64BIT:
11710 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11712 IEM_MC_LOCAL(uint64_t, u64Value);
11713 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11714 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11715 IEM_MC_ADVANCE_RIP_AND_FINISH();
11716 IEM_MC_END();
11717 break;
11718
11719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11720 }
11721 }
11722 else
11723 {
11724 /*
11725 * We're loading a register from memory.
11726 */
11727 switch (pVCpu->iem.s.enmEffOpSize)
11728 {
11729 case IEMMODE_16BIT:
11730 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11731 IEM_MC_LOCAL(uint16_t, u16Value);
11732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11735 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11736 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11737 IEM_MC_ADVANCE_RIP_AND_FINISH();
11738 IEM_MC_END();
11739 break;
11740
11741 case IEMMODE_32BIT:
11742 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11743 IEM_MC_LOCAL(uint32_t, u32Value);
11744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11747 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11748 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11749 IEM_MC_ADVANCE_RIP_AND_FINISH();
11750 IEM_MC_END();
11751 break;
11752
11753 case IEMMODE_64BIT:
11754 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11755 IEM_MC_LOCAL(uint64_t, u64Value);
11756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11759 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11760 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11761 IEM_MC_ADVANCE_RIP_AND_FINISH();
11762 IEM_MC_END();
11763 break;
11764
11765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11766 }
11767 }
11768}
11769
11770
11771/** Opcode 0x0f 0xbf. */
11772FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11773{
11774 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11775 IEMOP_HLP_MIN_386();
11776
11777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11778
11779 /** @todo Not entirely sure how the operand size prefix is handled here,
11780 * assuming that it will be ignored. Would be nice to have a few
11781 * tests for this. */
11782 /*
11783 * If rm is denoting a register, no more instruction bytes.
11784 */
11785 if (IEM_IS_MODRM_REG_MODE(bRm))
11786 {
11787 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11788 {
11789 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11791 IEM_MC_LOCAL(uint32_t, u32Value);
11792 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11793 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11794 IEM_MC_ADVANCE_RIP_AND_FINISH();
11795 IEM_MC_END();
11796 }
11797 else
11798 {
11799 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11801 IEM_MC_LOCAL(uint64_t, u64Value);
11802 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11803 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11804 IEM_MC_ADVANCE_RIP_AND_FINISH();
11805 IEM_MC_END();
11806 }
11807 }
11808 else
11809 {
11810 /*
11811 * We're loading a register from memory.
11812 */
11813 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11814 {
11815 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11816 IEM_MC_LOCAL(uint32_t, u32Value);
11817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11820 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11821 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11822 IEM_MC_ADVANCE_RIP_AND_FINISH();
11823 IEM_MC_END();
11824 }
11825 else
11826 {
11827 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11828 IEM_MC_LOCAL(uint64_t, u64Value);
11829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11832 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11833 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11834 IEM_MC_ADVANCE_RIP_AND_FINISH();
11835 IEM_MC_END();
11836 }
11837 }
11838}
11839
11840
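/*
 * XADD exchanges the destination with the source register and stores the sum
 * in the destination; a minimal sketch (illustrative only, the flags are set
 * as for an ADD):
 *
 *      uTmp   = *puDst;
 *      *puDst = uTmp + uReg;
 *      uReg   = uTmp;
 */
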
11841/** Opcode 0x0f 0xc0. */
11842FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11843{
11844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11845 IEMOP_HLP_MIN_486();
11846 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11847
11848 /*
11849 * If rm is denoting a register, no more instruction bytes.
11850 */
11851 if (IEM_IS_MODRM_REG_MODE(bRm))
11852 {
11853 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11855 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11856 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11857 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11858
11859 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11860 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11861 IEM_MC_REF_EFLAGS(pEFlags);
11862 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11863
11864 IEM_MC_ADVANCE_RIP_AND_FINISH();
11865 IEM_MC_END();
11866 }
11867 else
11868 {
11869 /*
11870 * We're accessing memory.
11871 */
11872#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11873 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
11874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11876 IEMOP_HLP_DONE_DECODING(); \
11877 \
11878 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11879 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11880 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11881 \
11882 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11883 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11884 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11885 \
11886 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11887 IEM_MC_FETCH_EFLAGS(EFlags); \
11888 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11889 \
11890 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11891 IEM_MC_COMMIT_EFLAGS(EFlags); \
11892 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11894 IEM_MC_END()
11895 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11896 {
11897 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11898 }
11899 else
11900 {
11901 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11902 }
11903 }
11904}
11905
11906
11907/** Opcode 0x0f 0xc1. */
11908FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11909{
11910 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11911 IEMOP_HLP_MIN_486();
11912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11913
11914 /*
11915 * If rm is denoting a register, no more instruction bytes.
11916 */
11917 if (IEM_IS_MODRM_REG_MODE(bRm))
11918 {
11919 switch (pVCpu->iem.s.enmEffOpSize)
11920 {
11921 case IEMMODE_16BIT:
11922 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11924 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11925 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11926 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11927
11928 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11929 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11930 IEM_MC_REF_EFLAGS(pEFlags);
11931 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11932
11933 IEM_MC_ADVANCE_RIP_AND_FINISH();
11934 IEM_MC_END();
11935 break;
11936
11937 case IEMMODE_32BIT:
11938 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11940 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11941 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11942 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11943
11944 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11945 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11946 IEM_MC_REF_EFLAGS(pEFlags);
11947 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11948
11949 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11950 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11951 IEM_MC_ADVANCE_RIP_AND_FINISH();
11952 IEM_MC_END();
11953 break;
11954
11955 case IEMMODE_64BIT:
11956 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11958 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11959 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11960 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11961
11962 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11963 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11964 IEM_MC_REF_EFLAGS(pEFlags);
11965 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11966
11967 IEM_MC_ADVANCE_RIP_AND_FINISH();
11968 IEM_MC_END();
11969 break;
11970
11971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11972 }
11973 }
11974 else
11975 {
11976 /*
11977 * We're accessing memory.
11978 */
11979#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11980 do { \
11981 switch (pVCpu->iem.s.enmEffOpSize) \
11982 { \
11983 case IEMMODE_16BIT: \
11984 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
11985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11987 IEMOP_HLP_DONE_DECODING(); \
11988 \
11989 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11990 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11991 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11992 \
11993 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11994 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11995 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11996 \
11997 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11998 IEM_MC_FETCH_EFLAGS(EFlags); \
11999 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
12000 \
12001 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12002 IEM_MC_COMMIT_EFLAGS(EFlags); \
12003 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
12004 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12005 IEM_MC_END(); \
12006 break; \
12007 \
12008 case IEMMODE_32BIT: \
12009 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
12010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12012 IEMOP_HLP_DONE_DECODING(); \
12013 \
12014 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12015 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12016 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12017 \
12018 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
12019 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
12020 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
12021 \
12022 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
12023 IEM_MC_FETCH_EFLAGS(EFlags); \
12024 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
12025 \
12026 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12027 IEM_MC_COMMIT_EFLAGS(EFlags); \
12028 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
12029 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12030 IEM_MC_END(); \
12031 break; \
12032 \
12033 case IEMMODE_64BIT: \
12034 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
12035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12037 IEMOP_HLP_DONE_DECODING(); \
12038 \
12039 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12040 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12041 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12042 \
12043 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
12044 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
12045 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
12046 \
12047 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
12048 IEM_MC_FETCH_EFLAGS(EFlags); \
12049 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
12050 \
12051 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12052 IEM_MC_COMMIT_EFLAGS(EFlags); \
12053 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
12054 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12055 IEM_MC_END(); \
12056 break; \
12057 \
12058 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12059 } \
12060 } while (0)
12061
12062 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12063 {
12064 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
12065 }
12066 else
12067 {
12068 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
12069 }
12070 }
12071}
12072
12073
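/*
 * Note! For all the 0x0f 0xc2 compare forms the imm8 selects the SSE
 *       comparison predicate: 0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT,
 *       6=NLE, 7=ORD. The predicate is interpreted by the assembly workers,
 *       so the decoder functions below just pass imm8 along unmodified.
 */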
12074/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
12075FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
12076{
12077 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12078
12079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12080 if (IEM_IS_MODRM_REG_MODE(bRm))
12081 {
12082 /*
12083 * XMM, XMM.
12084 */
12085 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12086 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12088 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12089 IEM_MC_LOCAL(X86XMMREG, Dst);
12090 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12091 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12092 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12093 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12094 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12095 IEM_MC_PREPARE_SSE_USAGE();
12096 IEM_MC_REF_MXCSR(pfMxcsr);
12097 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12098 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
12099 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12100 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12101 } IEM_MC_ELSE() {
12102 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12103 } IEM_MC_ENDIF();
12104
12105 IEM_MC_ADVANCE_RIP_AND_FINISH();
12106 IEM_MC_END();
12107 }
12108 else
12109 {
12110 /*
12111 * XMM, [mem128].
12112 */
12113 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12114 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12115 IEM_MC_LOCAL(X86XMMREG, Dst);
12116 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12117 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12118 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12120
12121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12122 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12123 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12125 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12126 IEM_MC_PREPARE_SSE_USAGE();
12127
12128 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12129 IEM_MC_REF_MXCSR(pfMxcsr);
12130 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
12131 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12132 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12133 } IEM_MC_ELSE() {
12134 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12135 } IEM_MC_ENDIF();
12136
12137 IEM_MC_ADVANCE_RIP_AND_FINISH();
12138 IEM_MC_END();
12139 }
12140}
12141
12142
12143/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
12144FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
12145{
12146 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12147
12148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12149 if (IEM_IS_MODRM_REG_MODE(bRm))
12150 {
12151 /*
12152 * XMM, XMM.
12153 */
12154 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12155 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12157 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12158 IEM_MC_LOCAL(X86XMMREG, Dst);
12159 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12160 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12161 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12162 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12163 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12164 IEM_MC_PREPARE_SSE_USAGE();
12165 IEM_MC_REF_MXCSR(pfMxcsr);
12166 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12167 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12168 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12169 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12170 } IEM_MC_ELSE() {
12171 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12172 } IEM_MC_ENDIF();
12173
12174 IEM_MC_ADVANCE_RIP_AND_FINISH();
12175 IEM_MC_END();
12176 }
12177 else
12178 {
12179 /*
12180 * XMM, [mem128].
12181 */
12182 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12183 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12184 IEM_MC_LOCAL(X86XMMREG, Dst);
12185 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12186 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12187 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12189
12190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12191 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12192 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12194 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12195 IEM_MC_PREPARE_SSE_USAGE();
12196
12197 IEM_MC_REF_MXCSR(pfMxcsr);
12198 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12199 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12200 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12201 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12202 } IEM_MC_ELSE() {
12203 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12204 } IEM_MC_ENDIF();
12205
12206 IEM_MC_ADVANCE_RIP_AND_FINISH();
12207 IEM_MC_END();
12208 }
12209}
12210
12211
12212/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
12213FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
12214{
12215 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12216
12217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12218 if (IEM_IS_MODRM_REG_MODE(bRm))
12219 {
12220 /*
12221 * XMM32, XMM32.
12222 */
12223 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12224 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12226 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12227 IEM_MC_LOCAL(X86XMMREG, Dst);
12228 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12229 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12230 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12231 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12232 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12233 IEM_MC_PREPARE_SSE_USAGE();
12234 IEM_MC_REF_MXCSR(pfMxcsr);
12235 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12236 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
12237 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12238 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12239 } IEM_MC_ELSE() {
12240 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
12241 } IEM_MC_ENDIF();
12242
12243 IEM_MC_ADVANCE_RIP_AND_FINISH();
12244 IEM_MC_END();
12245 }
12246 else
12247 {
12248 /*
12249 * XMM32, [mem32].
12250 */
12251 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12252 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12253 IEM_MC_LOCAL(X86XMMREG, Dst);
12254 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12255 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12256 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12258
12259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12260 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12261 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12263 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12264 IEM_MC_PREPARE_SSE_USAGE();
12265
12266 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12267 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12268 IEM_MC_REF_MXCSR(pfMxcsr);
12269 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
12270 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12271 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12272 } IEM_MC_ELSE() {
12273 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
12274 } IEM_MC_ENDIF();
12275
12276 IEM_MC_ADVANCE_RIP_AND_FINISH();
12277 IEM_MC_END();
12278 }
12279}
12280
12281
12282/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
12283FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
12284{
12285 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12286
12287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12288 if (IEM_IS_MODRM_REG_MODE(bRm))
12289 {
12290 /*
12291 * XMM64, XMM64.
12292 */
12293 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12294 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12296 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12297 IEM_MC_LOCAL(X86XMMREG, Dst);
12298 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12299 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12300 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12301 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12302 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12303 IEM_MC_PREPARE_SSE_USAGE();
12304 IEM_MC_REF_MXCSR(pfMxcsr);
12305 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12306 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12307 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12308 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12309 } IEM_MC_ELSE() {
12310 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12311 } IEM_MC_ENDIF();
12312
12313 IEM_MC_ADVANCE_RIP_AND_FINISH();
12314 IEM_MC_END();
12315 }
12316 else
12317 {
12318 /*
12319 * XMM64, [mem64].
12320 */
12321 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12322 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12323 IEM_MC_LOCAL(X86XMMREG, Dst);
12324 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12325 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12326 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12328
12329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12330 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12331 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12333 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12334 IEM_MC_PREPARE_SSE_USAGE();
12335
12336 IEM_MC_REF_MXCSR(pfMxcsr);
12337 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12338 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12339 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12340 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12341 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12342 } IEM_MC_ELSE() {
12343 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12344 } IEM_MC_ENDIF();
12345
12346 IEM_MC_ADVANCE_RIP_AND_FINISH();
12347 IEM_MC_END();
12348 }
12349}
12350
12351
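/*
 * Note! MOVNTI is a store with a non-temporal hint telling the CPU to
 *       minimize cache pollution. The hint has no architectural effect on
 *       the result, so it is emulated below as a plain GREG -> memory store.
 */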
12352/** Opcode 0x0f 0xc3. */
12353FNIEMOP_DEF(iemOp_movnti_My_Gy)
12354{
12355 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12356
12357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12358
12359 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12360 if (IEM_IS_MODRM_MEM_MODE(bRm))
12361 {
12362 switch (pVCpu->iem.s.enmEffOpSize)
12363 {
12364 case IEMMODE_32BIT:
12365 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
12366 IEM_MC_LOCAL(uint32_t, u32Value);
12367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12368
12369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12371
12372 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12373 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12374 IEM_MC_ADVANCE_RIP_AND_FINISH();
12375 IEM_MC_END();
12376 break;
12377
12378 case IEMMODE_64BIT:
12379 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
12380 IEM_MC_LOCAL(uint64_t, u64Value);
12381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12382
12383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12385
12386 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12387 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12388 IEM_MC_ADVANCE_RIP_AND_FINISH();
12389 IEM_MC_END();
12390 break;
12391
12392 case IEMMODE_16BIT:
12393 /** @todo check this form. */
12394 IEMOP_RAISE_INVALID_OPCODE_RET();
12395
12396 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12397 }
12398 }
12399 else
12400 IEMOP_RAISE_INVALID_OPCODE_RET();
12401}
12402
12403
12404/* Opcode 0x66 0x0f 0xc3 - invalid */
12405/* Opcode 0xf3 0x0f 0xc3 - invalid */
12406/* Opcode 0xf2 0x0f 0xc3 - invalid */
12407
12408
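/*
 * Note! PINSRW replaces a single 16-bit lane of the destination with the low
 *       word of the source; imm8 selects the lane (the 64-bit MMX form uses
 *       bits 1:0, the 128-bit form bits 2:0; the masking is left to the
 *       workers).
 */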
12409/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12410FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12411{
12412 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12414 if (IEM_IS_MODRM_REG_MODE(bRm))
12415 {
12416 /*
12417 * Register, register.
12418 */
12419 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12420 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12422 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12423 IEM_MC_ARG(uint16_t, u16Src, 1);
12424 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12425 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12426 IEM_MC_PREPARE_FPU_USAGE();
12427 IEM_MC_FPU_TO_MMX_MODE();
12428 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12429 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12430 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12431 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12432 IEM_MC_ADVANCE_RIP_AND_FINISH();
12433 IEM_MC_END();
12434 }
12435 else
12436 {
12437 /*
12438 * Register, memory.
12439 */
12440 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12441 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12442 IEM_MC_ARG(uint16_t, u16Src, 1);
12443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12444
12445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12446 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12447 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12449 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12450 IEM_MC_PREPARE_FPU_USAGE();
12451 IEM_MC_FPU_TO_MMX_MODE();
12452
12453 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12454 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12455 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12456 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12457 IEM_MC_ADVANCE_RIP_AND_FINISH();
12458 IEM_MC_END();
12459 }
12460}
12461
12462
12463/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12464FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12465{
12466 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12468 if (IEM_IS_MODRM_REG_MODE(bRm))
12469 {
12470 /*
12471 * Register, register.
12472 */
12473 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12474 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12476 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12477 IEM_MC_ARG(uint16_t, u16Src, 1);
12478 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12479 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12480 IEM_MC_PREPARE_SSE_USAGE();
12481 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12482 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12483 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12484 IEM_MC_ADVANCE_RIP_AND_FINISH();
12485 IEM_MC_END();
12486 }
12487 else
12488 {
12489 /*
12490 * Register, memory.
12491 */
12492 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12493 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12494 IEM_MC_ARG(uint16_t, u16Src, 1);
12495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12496
12497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12498 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12499 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12502 IEM_MC_PREPARE_SSE_USAGE();
12503
12504 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12505 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12506 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12507 IEM_MC_ADVANCE_RIP_AND_FINISH();
12508 IEM_MC_END();
12509 }
12510}
12511
12512
12513/* Opcode 0xf3 0x0f 0xc4 - invalid */
12514/* Opcode 0xf2 0x0f 0xc4 - invalid */
12515
12516
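/*
 * Note! PEXTRW extracts the 16-bit lane selected by imm8 and zero extends it
 *       into the destination general register. Only the register source form
 *       exists, so the memory encodings below raise #UD.
 */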
12517/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12518FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12519{
12520 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12522 if (IEM_IS_MODRM_REG_MODE(bRm))
12523 {
12524 /*
12525 * Greg32, MMX, imm8.
12526 */
12527 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12528 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12530 IEM_MC_LOCAL(uint16_t, u16Dst);
12531 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12532 IEM_MC_ARG(uint64_t, u64Src, 1);
12533 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12534 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12535 IEM_MC_PREPARE_FPU_USAGE();
12536 IEM_MC_FPU_TO_MMX_MODE();
12537 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12538 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12539 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12540 IEM_MC_ADVANCE_RIP_AND_FINISH();
12541 IEM_MC_END();
12542 }
12543 /* No memory operand. */
12544 else
12545 IEMOP_RAISE_INVALID_OPCODE_RET();
12546}
12547
12548
12549/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12550FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12551{
12552 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12554 if (IEM_IS_MODRM_REG_MODE(bRm))
12555 {
12556 /*
12557 * Greg32, XMM, imm8.
12558 */
12559 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12560 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12562 IEM_MC_LOCAL(uint16_t, u16Dst);
12563 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12564 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12565 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12566 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12567 IEM_MC_PREPARE_SSE_USAGE();
12568 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12569 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12570 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12571 IEM_MC_ADVANCE_RIP_AND_FINISH();
12572 IEM_MC_END();
12573 }
12574 /* No memory operand. */
12575 else
12576 IEMOP_RAISE_INVALID_OPCODE_RET();
12577}
12578
12579
12580/* Opcode 0xf3 0x0f 0xc5 - invalid */
12581/* Opcode 0xf2 0x0f 0xc5 - invalid */
12582
12583
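/*
 * Note! For SHUFPS each 2-bit field of imm8 picks a source dword: the two
 *       low result dwords are selected from the destination and the two high
 *       ones from the source operand. SHUFPD works the same way with two
 *       1-bit selectors for its qwords.
 */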
12584/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12585FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12586{
12587 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12589 if (IEM_IS_MODRM_REG_MODE(bRm))
12590 {
12591 /*
12592 * XMM, XMM, imm8.
12593 */
12594 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12595 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12597 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12598 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12599 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12600 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12601 IEM_MC_PREPARE_SSE_USAGE();
12602 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12603 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12604 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12605 IEM_MC_ADVANCE_RIP_AND_FINISH();
12606 IEM_MC_END();
12607 }
12608 else
12609 {
12610 /*
12611 * XMM, [mem128], imm8.
12612 */
12613 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12614 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12615 IEM_MC_LOCAL(RTUINT128U, uSrc);
12616 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12618
12619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12620 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12621 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12623 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12624 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12625
12626 IEM_MC_PREPARE_SSE_USAGE();
12627 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12628 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12629
12630 IEM_MC_ADVANCE_RIP_AND_FINISH();
12631 IEM_MC_END();
12632 }
12633}
12634
12635
12636/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12637FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12638{
12639 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12641 if (IEM_IS_MODRM_REG_MODE(bRm))
12642 {
12643 /*
12644 * XMM, XMM, imm8.
12645 */
12646 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12647 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12649 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12650 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12651 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12652 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12653 IEM_MC_PREPARE_SSE_USAGE();
12654 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12655 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12656 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12657 IEM_MC_ADVANCE_RIP_AND_FINISH();
12658 IEM_MC_END();
12659 }
12660 else
12661 {
12662 /*
12663 * XMM, [mem128], imm8.
12664 */
12665 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12666 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12667 IEM_MC_LOCAL(RTUINT128U, uSrc);
12668 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12670
12671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12672 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12673 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12675 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12676 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12677
12678 IEM_MC_PREPARE_SSE_USAGE();
12679 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12680 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12681
12682 IEM_MC_ADVANCE_RIP_AND_FINISH();
12683 IEM_MC_END();
12684 }
12685}
12686
12687
12688/* Opcode 0xf3 0x0f 0xc6 - invalid */
12689/* Opcode 0xf2 0x0f 0xc6 - invalid */
12690
12691
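/*
 * Note! CMPXCHG8B compares EDX:EAX with the 64-bit memory operand; if equal
 *       it sets ZF and stores ECX:EBX to memory, otherwise it clears ZF and
 *       loads the memory value into EDX:EAX. That is why the body below only
 *       writes the EAX/EDX pair back when ZF ends up clear.
 */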
12692/**
12693 * @opmaps grp9
12694 * @opcode /1
12695 * @opcodesub !11 mr/reg rex.w=0
12696 * @oppfx n/a
12697 * @opflmodify zf
12698 */
12699FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12700{
12701 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12702#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12703 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0); \
12704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12706 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12707 \
12708 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12709 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12710 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12711 \
12712 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12713 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12714 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12715 \
12716 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12717 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12718 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12719 \
12720 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12721 IEM_MC_FETCH_EFLAGS(EFlags); \
12722 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12723 \
12724 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12725 IEM_MC_COMMIT_EFLAGS(EFlags); \
12726 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12727 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12728 } IEM_MC_ENDIF(); \
12729 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12730 \
12731 IEM_MC_END()
12732 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12733 {
12734 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12735 }
12736 else
12737 {
12738 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12739 }
12740}
12741
12742
12743/**
12744 * @opmaps grp9
12745 * @opcode /1
12746 * @opcodesub !11 mr/reg rex.w=1
12747 * @oppfx n/a
12748 * @opflmodify zf
12749 */
12750FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12751{
12752 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12753 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12754 {
12755 /*
12756 * This is hairy, very hairy macro fun. We're walking a fine line
12757 * here to make the code parsable by IEMAllInstPython.py and fit into
12758 * the patterns IEMAllThrdPython.py requires for the code morphing.
12759 */
12760#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12761 IEM_MC_BEGIN(5, 4, IEM_MC_F_64BIT, 0); \
12762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12764 IEMOP_HLP_DONE_DECODING(); \
12765 \
12766 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12767 bUnmapInfoStmt; \
12768 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12769 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12770 \
12771 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12772 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12773 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12774 \
12775 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12776 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12777 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12778 \
12779 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12780 IEM_MC_FETCH_EFLAGS(EFlags)
12781
12782#define BODY_CMPXCHG16B_TAIL(a_Type) \
12783 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12784 IEM_MC_COMMIT_EFLAGS(EFlags); \
12785 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12786 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12787 } IEM_MC_ENDIF(); \
12788 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12789 IEM_MC_END()
12790
12791#ifdef RT_ARCH_AMD64
12792 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12793 {
12794 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12795 {
12796 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12797 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12798 BODY_CMPXCHG16B_TAIL(RW);
12799 }
12800 else
12801 {
12802 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12803 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12804 BODY_CMPXCHG16B_TAIL(ATOMIC);
12805 }
12806 }
12807 else
12808 { /* (see comments in #else case below) */
12809 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12810 {
12811 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12812 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12813 BODY_CMPXCHG16B_TAIL(RW);
12814 }
12815 else
12816 {
12817 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12818 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12819 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12820 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12821 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12822 pEFlags, bUnmapInfo);
12823 IEM_MC_END();
12824 }
12825 }
12826
12827#elif defined(RT_ARCH_ARM64)
12828 /** @todo may require fallback for unaligned accesses... */
12829 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12830 {
12831 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12832 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12833 BODY_CMPXCHG16B_TAIL(RW);
12834 }
12835 else
12836 {
12837 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12838 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12839 BODY_CMPXCHG16B_TAIL(ATOMIC);
12840 }
12841
12842#else
12843 /* Note! The fallback for 32-bit systems and systems without CX16 does multiple
12844 accesses that are not at all atomic, which works fine in a uni-CPU guest
12845 configuration (ignoring DMA). If guest SMP is active we have no choice
12846 but to use a rendezvous callback here. Sigh. */
12847 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12848 {
12849 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12850 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12851 BODY_CMPXCHG16B_TAIL(RW);
12852 }
12853 else
12854 {
12855 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12856 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12857 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12858 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12859 iemCImpl_cmpxchg16b_fallback_rendezvous,
12860 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12861 IEM_MC_END();
12862 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12863 }
12864#endif
12865
12866#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12867 }
12868 Log(("cmpxchg16b -> #UD\n"));
12869 IEMOP_RAISE_INVALID_OPCODE_RET();
12870}
12871
12872FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12873{
12874 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12875 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12876 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12877}
12878
12879
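/*
 * Note! RDRAND (and RDSEED below) signals success via CF: CF=1 means the
 *       destination holds a valid random value, CF=0 means the caller should
 *       retry. Hence the IEM_CIMPL_F_RFLAGS annotation on the CIMPL calls.
 */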
12880/** Opcode 0x0f 0xc7 11/6. */
12881FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12882{
12883 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12884 IEMOP_RAISE_INVALID_OPCODE_RET();
12885
12886 if (IEM_IS_MODRM_REG_MODE(bRm))
12887 {
12888 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12890 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12891 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12892 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12893 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12894 iemCImpl_rdrand, iReg, enmEffOpSize);
12895 IEM_MC_END();
12896 }
12897 /* Register only. */
12898 else
12899 IEMOP_RAISE_INVALID_OPCODE_RET();
12900}
12901
12902/** Opcode 0x0f 0xc7 !11/6. */
12903#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12904FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12905{
12906 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12907 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12908 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12909 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12910 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12912 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12913 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12914 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12915 IEM_MC_END();
12916}
12917#else
12918FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12919#endif
12920
12921/** Opcode 0x66 0x0f 0xc7 !11/6. */
12922#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12923FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12924{
12925 IEMOP_MNEMONIC(vmclear, "vmclear");
12926 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12927 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12928 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12929 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12931 IEMOP_HLP_DONE_DECODING();
12932 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12933 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12934 IEM_MC_END();
12935}
12936#else
12937FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12938#endif
12939
12940/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12941#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12942FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12943{
12944 IEMOP_MNEMONIC(vmxon, "vmxon");
12945 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12946 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12947 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12949 IEMOP_HLP_DONE_DECODING();
12950 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12951 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12952 IEM_MC_END();
12953}
12954#else
12955FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12956#endif
12957
12958/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12959#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12960FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12961{
12962 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12963 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12964 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12965 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12966 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12968 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12969 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12970 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12971 IEM_MC_END();
12972}
12973#else
12974FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12975#endif
12976
12977/** Opcode 0x0f 0xc7 11/7. */
12978FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12979{
12980 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12981 IEMOP_RAISE_INVALID_OPCODE_RET();
12982
12983 if (IEM_IS_MODRM_REG_MODE(bRm))
12984 {
12985 /* register destination. */
12986 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12988 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12989 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12990 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12991 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12992 iemCImpl_rdseed, iReg, enmEffOpSize);
12993 IEM_MC_END();
12994 }
12995 /* Register only. */
12996 else
12997 IEMOP_RAISE_INVALID_OPCODE_RET();
12998}
12999
13000/**
13001 * Group 9 jump table for register variant.
13002 */
13003IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
13004{ /* pfx: none, 066h, 0f3h, 0f2h */
13005 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
13006 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
13007 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
13008 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
13009 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
13010 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
13011 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
13012 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
13013};
13014AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
13015
13016
13017/**
13018 * Group 9 jump table for memory variant.
13019 */
13020IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
13021{ /* pfx: none, 066h, 0f3h, 0f2h */
13022 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
13023 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
13024 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
13025 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
13026 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
13027 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
13028 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
13029 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
13030};
13031AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
13032
13033
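/*
 * Note! Both group 9 tables are indexed by ModR/M.reg * 4 plus the prefix
 *       index (none, 0x66, 0xf3, 0xf2), matching the column layout above.
 */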
13034/** Opcode 0x0f 0xc7. */
13035FNIEMOP_DEF(iemOp_Grp9)
13036{
13037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13038 if (IEM_IS_MODRM_REG_MODE(bRm))
13039 /* register, register */
13040 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
13041 + pVCpu->iem.s.idxPrefix], bRm);
13042 /* memory, register */
13043 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
13044 + pVCpu->iem.s.idxPrefix], bRm);
13045}
13046
13047
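/*
 * Note! BSWAP with a 16-bit operand size is documented as undefined, so the
 *       16-bit case below is best effort (observed hardware tends to zero
 *       the low word). It deliberately references the register as a 32-bit
 *       GREG without clearing the high dword.
 */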
13048/**
13049 * Common 'bswap register' helper.
13050 */
13051FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
13052{
13053 switch (pVCpu->iem.s.enmEffOpSize)
13054 {
13055 case IEMMODE_16BIT:
13056 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
13057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13058 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13059 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
13060 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
13061 IEM_MC_ADVANCE_RIP_AND_FINISH();
13062 IEM_MC_END();
13063 break;
13064
13065 case IEMMODE_32BIT:
13066 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
13067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13068 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13069 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
13070 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
13071 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
13072 IEM_MC_ADVANCE_RIP_AND_FINISH();
13073 IEM_MC_END();
13074 break;
13075
13076 case IEMMODE_64BIT:
13077 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
13078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13079 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13080 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
13081 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
13082 IEM_MC_ADVANCE_RIP_AND_FINISH();
13083 IEM_MC_END();
13084 break;
13085
13086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13087 }
13088}
13089
13090
13091/** Opcode 0x0f 0xc8. */
13092FNIEMOP_DEF(iemOp_bswap_rAX_r8)
13093{
13094 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
13095 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
13096 prefix, but it appears REX.B is the correct one. For a parallel
13097 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
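 /* E.g. in 64-bit mode, 0f c8 is 'bswap eax', 41 0f c8 is 'bswap r8d' and
 49 0f c8 is 'bswap r8'. */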
13098 IEMOP_HLP_MIN_486();
13099 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13100}
13101
13102
13103/** Opcode 0x0f 0xc9. */
13104FNIEMOP_DEF(iemOp_bswap_rCX_r9)
13105{
13106 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
13107 IEMOP_HLP_MIN_486();
13108 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13109}
13110
13111
13112/** Opcode 0x0f 0xca. */
13113FNIEMOP_DEF(iemOp_bswap_rDX_r10)
13114{
13115    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
13116 IEMOP_HLP_MIN_486();
13117 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13118}
13119
13120
13121/** Opcode 0x0f 0xcb. */
13122FNIEMOP_DEF(iemOp_bswap_rBX_r11)
13123{
13124    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
13125 IEMOP_HLP_MIN_486();
13126 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13127}
13128
13129
13130/** Opcode 0x0f 0xcc. */
13131FNIEMOP_DEF(iemOp_bswap_rSP_r12)
13132{
13133 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
13134 IEMOP_HLP_MIN_486();
13135 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13136}
13137
13138
13139/** Opcode 0x0f 0xcd. */
13140FNIEMOP_DEF(iemOp_bswap_rBP_r13)
13141{
13142 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
13143 IEMOP_HLP_MIN_486();
13144 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13145}
13146
13147
13148/** Opcode 0x0f 0xce. */
13149FNIEMOP_DEF(iemOp_bswap_rSI_r14)
13150{
13151 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
13152 IEMOP_HLP_MIN_486();
13153 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13154}
13155
13156
13157/** Opcode 0x0f 0xcf. */
13158FNIEMOP_DEF(iemOp_bswap_rDI_r15)
13159{
13160 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
13161 IEMOP_HLP_MIN_486();
13162 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13163}
13164
13165
13166/* Opcode 0x0f 0xd0 - invalid */
13167
13168
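/*
 * Note! ADDSUBPD/ADDSUBPS (SSE3) subtract in the even lanes and add in the
 *       odd lanes, e.g. for addsubpd: dst[0] = dst[0] - src[0] and
 *       dst[1] = dst[1] + src[1].
 */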
13169/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
13170FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
13171{
13172 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13173 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
13174}
13175
13176
13177/* Opcode 0xf3 0x0f 0xd0 - invalid */
13178
13179
13180/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
13181FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
13182{
13183 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13184 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
13185}
13186
13187
13188
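/*
 * Note! The psrl{w,d,q} forms below shift each lane right logically by the
 *       unsigned count in the source operand; a count larger than the lane
 *       width zeroes the destination rather than being masked like the GPR
 *       shifts.
 */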
13189/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
13190FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
13191{
13192 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13193 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
13194}
13195
13196/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
13197FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
13198{
13199 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13200 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
13201}
13202
13203/* Opcode 0xf3 0x0f 0xd1 - invalid */
13204/* Opcode 0xf2 0x0f 0xd1 - invalid */
13205
13206/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
13207FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
13208{
13209 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13210 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
13211}
13212
13213
13214/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
13215FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
13216{
13217 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13218 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
13219}
13220
13221
13222/* Opcode 0xf3 0x0f 0xd2 - invalid */
13223/* Opcode 0xf2 0x0f 0xd2 - invalid */
13224
13225/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
13226FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
13227{
13228 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13229 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
13230}
13231
13232
13233/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
13234FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
13235{
13236 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13237 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
13238}
13239
13240
13241/* Opcode 0xf3 0x0f 0xd3 - invalid */
13242/* Opcode 0xf2 0x0f 0xd3 - invalid */
13243
13244
13245/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
13246FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
13247{
13248 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13249 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
13250}
13251
13252
13253/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
13254FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
13255{
13256 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13257 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
13258}
13259
13260
13261/* Opcode 0xf3 0x0f 0xd4 - invalid */
13262/* Opcode 0xf2 0x0f 0xd4 - invalid */
13263
13264/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
13265FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
13266{
13267 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13268 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
13269}
13270
13271/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
13272FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
13273{
13274 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13275 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
13276}
13277
13278
13279/* Opcode 0xf3 0x0f 0xd5 - invalid */
13280/* Opcode 0xf2 0x0f 0xd5 - invalid */
13281
13282/* Opcode 0x0f 0xd6 - invalid */
13283
13284/**
13285 * @opcode 0xd6
13286 * @oppfx 0x66
13287 * @opcpuid sse2
13288 * @opgroup og_sse2_pcksclr_datamove
13289 * @opxcpttype none
13290 * @optest op1=-1 op2=2 -> op1=2
13291 * @optest op1=0 op2=-42 -> op1=-42
13292 */
13293FNIEMOP_DEF(iemOp_movq_Wq_Vq)
13294{
13295 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13297 if (IEM_IS_MODRM_REG_MODE(bRm))
13298 {
13299 /*
13300 * Register, register.
13301 */
13302 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13304 IEM_MC_LOCAL(uint64_t, uSrc);
13305
13306 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13307 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13308
13309 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13310 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
13311
13312 IEM_MC_ADVANCE_RIP_AND_FINISH();
13313 IEM_MC_END();
13314 }
13315 else
13316 {
13317 /*
13318 * Memory, register.
13319 */
13320 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13321 IEM_MC_LOCAL(uint64_t, uSrc);
13322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13323
13324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13326 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13327 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13328
13329 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13330 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13331
13332 IEM_MC_ADVANCE_RIP_AND_FINISH();
13333 IEM_MC_END();
13334 }
13335}
13336
13337
13338/**
13339 * @opcode 0xd6
13340 * @opcodesub 11 mr/reg
13341 * @oppfx f3
13342 * @opcpuid sse2
13343 * @opgroup og_sse2_simdint_datamove
13344 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13345 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13346 */
13347FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13348{
13349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13350 if (IEM_IS_MODRM_REG_MODE(bRm))
13351 {
13352 /*
13353 * Register, register.
13354 */
13355 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13356 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13358 IEM_MC_LOCAL(uint64_t, uSrc);
13359
13360 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13361 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13362 IEM_MC_FPU_TO_MMX_MODE();
13363
13364 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13365 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13366
13367 IEM_MC_ADVANCE_RIP_AND_FINISH();
13368 IEM_MC_END();
13369 }
13370
13371 /**
13372 * @opdone
13373 * @opmnemonic udf30fd6mem
13374 * @opcode 0xd6
13375 * @opcodesub !11 mr/reg
13376 * @oppfx f3
13377 * @opunused intel-modrm
13378 * @opcpuid sse
13379 * @optest ->
13380 */
13381 else
13382 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13383}
13384
13385
13386/**
13387 * @opcode 0xd6
13388 * @opcodesub 11 mr/reg
13389 * @oppfx f2
13390 * @opcpuid sse2
13391 * @opgroup og_sse2_simdint_datamove
13392 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13393 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13394 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13395 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13396 * @optest op1=-42 op2=0xfedcba9876543210
13397 * -> op1=0xfedcba9876543210 ftw=0xff
13398 */
13399FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13400{
13401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13402 if (IEM_IS_MODRM_REG_MODE(bRm))
13403 {
13404 /*
13405 * Register, register.
13406 */
13407 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13408 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13410 IEM_MC_LOCAL(uint64_t, uSrc);
13411
13412 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13413 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13414 IEM_MC_FPU_TO_MMX_MODE();
13415
13416 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13417 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13418
13419 IEM_MC_ADVANCE_RIP_AND_FINISH();
13420 IEM_MC_END();
13421 }
13422
13423 /**
13424 * @opdone
13425 * @opmnemonic udf20fd6mem
13426 * @opcode 0xd6
13427 * @opcodesub !11 mr/reg
13428 * @oppfx f2
13429 * @opunused intel-modrm
13430 * @opcpuid sse
13431 * @optest ->
13432 */
13433 else
13434 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13435}
13436
13437
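/*
 * Note! PMOVMSKB gathers the most significant bit of each source byte into
 *       the low 8 (MMX) or 16 (SSE) bits of the destination and zeroes the
 *       rest, which is why the code below simply references the full 64-bit
 *       GREG and lets the worker write all of it.
 */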
13438/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13439FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13440{
13441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13442    /* Docs say register only. */
13443 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13444 {
13445        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13446 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13447 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13449 IEM_MC_ARG(uint64_t *, puDst, 0);
13450 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13451 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13452 IEM_MC_PREPARE_FPU_USAGE();
13453 IEM_MC_FPU_TO_MMX_MODE();
13454
13455 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13456 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13457 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13458
13459 IEM_MC_ADVANCE_RIP_AND_FINISH();
13460 IEM_MC_END();
13461 }
13462 else
13463 IEMOP_RAISE_INVALID_OPCODE_RET();
13464}
13465
13466
13467/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13468FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13469{
13470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13471    /* Docs say register only. */
13472 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13473 {
13474        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13475 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13476 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13478 IEM_MC_ARG(uint64_t *, puDst, 0);
13479 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13480 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13481 IEM_MC_PREPARE_SSE_USAGE();
13482 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13483 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13484 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13485 IEM_MC_ADVANCE_RIP_AND_FINISH();
13486 IEM_MC_END();
13487 }
13488 else
13489 IEMOP_RAISE_INVALID_OPCODE_RET();
13490}
13491
13492
13493/* Opcode 0xf3 0x0f 0xd7 - invalid */
13494/* Opcode 0xf2 0x0f 0xd7 - invalid */
13495
13496
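/*
 * Note! The 'us' in psubus{b,w} (and paddus{b,w} further down) stands for
 *       unsigned saturation: results clamp to [0, 0xff] / [0, 0xffff]
 *       instead of wrapping around.
 */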
13497/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13498FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13499{
13500 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13501 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13502}
13503
13504
13505/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13506FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13507{
13508 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13509 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13510}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}

/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}


/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}

/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */


/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
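

/*
 * For reference: pavgb/pavgw compute a rounded unsigned average per element,
 * i.e. (dst + src + 1) >> 1 with an intermediate that cannot overflow. A
 * minimal sketch of the byte variant under those documented semantics (the
 * actual iemAImpl_pavgb_* workers live elsewhere):
 *
 * @code
 * #include <stdint.h>
 *
 * static void sketch_pavgb_u64(uint64_t *puDst, const uint64_t *puSrc)
 * {
 *     uint8_t const *pabSrc = (uint8_t const *)puSrc;
 *     uint8_t       *pabDst = (uint8_t *)puDst;
 *     for (unsigned i = 0; i < 8; i++)
 *         pabDst[i] = (uint8_t)(((unsigned)pabDst[i] + pabSrc[i] + 1) >> 1);
 * }
 * @endcode
 */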


/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}


/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}


/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}


/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
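

/*
 * For reference: pmulhuw/pmulhw store the high 16 bits of the full 32-bit
 * word product (unsigned respectively signed). A sketch of both per-element
 * steps, assuming only the documented semantics:
 *
 * @code
 * #include <stdint.h>
 *
 * static uint16_t sketch_mulhuw(uint16_t uDst, uint16_t uSrc)
 * {
 *     return (uint16_t)(((uint32_t)uDst * uSrc) >> 16);
 * }
 *
 * static uint16_t sketch_mulhw(int16_t iDst, int16_t iSrc)
 * {
 *     return (uint16_t)(((int32_t)iDst * iSrc) >> 16);
 * }
 * @endcode
 */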


/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */
/* Opcode 0x0f 0xe6 - invalid */


/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
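

/*
 * For reference: the three 0xe6 conversions differ mainly in rounding:
 * cvttpd2dq truncates toward zero, cvtpd2dq rounds according to MXCSR.RC,
 * and cvtdq2pd widens 32-bit integers to doubles (always exact). A sketch
 * of the truncating scalar step, assuming only the documented semantics
 * (boundary/NaN handling simplified to the "integer indefinite" result):
 *
 * @code
 * #include <stdint.h>
 *
 * static int32_t sketch_cvtt_f64_to_i32(double rd)
 * {
 *     if (rd > -2147483649.0 && rd < 2147483648.0)
 *         return (int32_t)rd; // C casts truncate toward zero, like cvtt*
 *     return INT32_MIN;       // out of range / NaN => integer indefinite
 * }
 * @endcode
 */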


/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
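

/*
 * For reference: both 0xe7 forms are non-temporal stores; the emulation just
 * performs a normal store, as IEM has no cache to bypass. Note the difference
 * above: movntdq goes through IEM_MC_STORE_MEM_U128_ALIGN_SSE, which enforces
 * the 16-byte alignment the instruction demands, while movntq carries no
 * alignment requirement. A sketch of that alignment rule, assuming only the
 * documented \#GP(0) behaviour:
 *
 * @code
 * #include <stdbool.h>
 * #include <stdint.h>
 *
 * static bool sketch_movntdq_addr_ok(uint64_t GCPtrEff)
 * {
 *     return (GCPtrEff & 15) == 0; // misaligned operand => #GP(0)
 * }
 * @endcode
 */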

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
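

/*
 * For reference: lddqu (SSE3) is an unaligned 128-bit load and, unlike
 * movdqa, never raises \#GP for a misaligned operand; the register form is
 * undefined and treated as \#UD above. A sketch of the load step, assuming
 * only the documented semantics:
 *
 * @code
 * #include <stdint.h>
 * #include <string.h>
 *
 * static void sketch_lddqu(uint8_t abDst[16], const void *pvSrc)
 * {
 *     memcpy(abDst, pvSrc, 16); // any byte alignment is fine
 * }
 * @endcode
 */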


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
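

/*
 * For reference: pmuludq multiplies the even 32-bit lanes as unsigned values
 * and produces full 64-bit results (one per quadword). A sketch of the
 * MMX-size step, assuming only the documented semantics:
 *
 * @code
 * #include <stdint.h>
 *
 * static void sketch_pmuludq_u64(uint64_t *puDst, const uint64_t *puSrc)
 * {
 *     *puDst = (uint64_t)(uint32_t)*puDst * (uint32_t)*puSrc;
 * }
 * @endcode
 */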


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}
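

/*
 * For reference: pmaddwd multiplies corresponding signed words and sums each
 * adjacent pair of 32-bit products into a doubleword. A sketch of the
 * MMX-size step, assuming only the documented semantics:
 *
 * @code
 * #include <stdint.h>
 * #include <string.h>
 *
 * static void sketch_pmaddwd_u64(uint64_t *puDst, const uint64_t *puSrc)
 * {
 *     int16_t  ai16Dst[4], ai16Src[4];
 *     uint32_t au32[2];
 *     memcpy(ai16Dst, puDst, sizeof(ai16Dst));
 *     memcpy(ai16Src, puSrc, sizeof(ai16Src));
 *     for (unsigned i = 0; i < 2; i++)
 *         // Unsigned addition so the lone wrap case (all inputs 0x8000,
 *         // result 0x80000000) is well defined, matching the hardware.
 *         au32[i] = (uint32_t)((int32_t)ai16Dst[2 * i]     * ai16Src[2 * i])
 *                 + (uint32_t)((int32_t)ai16Dst[2 * i + 1] * ai16Src[2 * i + 1]);
 *     memcpy(puDst, au32, sizeof(au32));
 * }
 * @endcode
 */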

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
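

/*
 * For reference: psadbw sums the absolute differences of the eight byte
 * pairs into a single 16-bit value stored in the low word, zeroing the rest
 * of the quadword (the 128-bit form does this once per 64-bit half). A
 * sketch of the MMX-size step, assuming only the documented semantics:
 *
 * @code
 * #include <stdint.h>
 *
 * static void sketch_psadbw_u64(uint64_t *puDst, const uint64_t *puSrc)
 * {
 *     uint8_t const *pabDst = (uint8_t const *)puDst;
 *     uint8_t const *pabSrc = (uint8_t const *)puSrc;
 *     uint32_t uSum = 0;
 *     for (unsigned i = 0; i < 8; i++)
 *         uSum += pabDst[i] > pabSrc[i] ? pabDst[i] - pabSrc[i] : pabSrc[i] - pabDst[i];
 *     *puDst = uSum; // fits in 16 bits (max 8 * 255); upper bits are zero
 * }
 * @endcode
 */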


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
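
/*
 * For reference (the two workers above are still stubs): maskmovq and
 * maskmovdqu store, as a non-temporal hint, only those bytes of the first
 * operand whose corresponding mask byte has bit 7 set, to [ds:rDI]. A sketch
 * of the selective store, assuming only the documented semantics:
 *
 * @code
 * #include <stdint.h>
 *
 * static void sketch_maskmov(uint8_t *pbDst, const uint8_t *pabSrc,
 *                            const uint8_t *pabMsk, unsigned cbVec)
 * {
 *     for (unsigned i = 0; i < cbVec; i++)  // cbVec = 8 or 16
 *         if (pabMsk[i] & 0x80)
 *             pbDst[i] = pabSrc[i];
 * }
 * @endcode
 */
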
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
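
/*
 * For reference: the map holds four entries per opcode byte, in the column
 * order named above (none, 066h, 0f3h, 0f2h), which is what the
 * 256 * 4 == 1024 assertion checks. A sketch of the lookup this layout
 * implies, with idxPrefix as a hypothetical 0..3 column index (the real
 * dispatch code lives elsewhere in IEM):
 *
 * @code
 * PFNIEMOP const pfnOp = g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
 * @endcode
 */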

/** @} */
