VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@95410

Last change on this file since 95410 was 95403, checked in by vboxsync, 3 years ago

VMM/IEM: vxorps, vxorpd, vpxor, xorps, xorpd and various related fixes. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 155.1 KB
 
1/* $Id: IEMAllInstructionsVexMap1.cpp.h 95403 2022-06-27 23:38:38Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.alldomusa.eu.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name VEX Opcode Map 1
23 * @{
24 */
25
26/**
27 * Common worker for AVX2 instructions on the forms:
28 * - vpxxx xmm0, xmm1, xmm2/mem128
29 * - vpxxx ymm0, ymm1, ymm2/mem256
30 *
31 * Exceptions type 4. AVX2 cpuid checks.
32 */
33FNIEMOP_DEF_1(iemOpCommonAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
34{
35 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
36 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
37 {
38 /*
39 * Register, register.
40 */
41 IEMOP_HLP_DONE_VEX_DECODING();
42 if (pVCpu->iem.s.uVexLength)
43 {
44 IEM_MC_BEGIN(4, 3);
45 IEM_MC_LOCAL(RTUINT256U, uDst);
46 IEM_MC_LOCAL(RTUINT256U, uSrc1);
47 IEM_MC_LOCAL(RTUINT256U, uSrc2);
48 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
49 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
50 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
51 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
52 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
53 IEM_MC_PREPARE_AVX_USAGE();
54 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
55 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
56 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
57 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
58 IEM_MC_ADVANCE_RIP();
59 IEM_MC_END();
60 }
61 else
62 {
63 IEM_MC_BEGIN(4, 0);
64 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
65 IEM_MC_ARG(PRTUINT128U, puDst, 1);
66 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
67 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
68 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
69 IEM_MC_PREPARE_AVX_USAGE();
70 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
71 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
72 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
73 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
74 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
75 IEM_MC_ADVANCE_RIP();
76 IEM_MC_END();
77 }
78 }
79 else
80 {
81 /*
82 * Register, memory.
83 */
84 if (pVCpu->iem.s.uVexLength)
85 {
86 IEM_MC_BEGIN(4, 4);
87 IEM_MC_LOCAL(RTUINT256U, uDst);
88 IEM_MC_LOCAL(RTUINT256U, uSrc1);
89 IEM_MC_LOCAL(RTUINT256U, uSrc2);
90 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
91 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
92 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
93 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
94 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
95
96 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
97 IEMOP_HLP_DONE_VEX_DECODING();
98 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
99 IEM_MC_PREPARE_AVX_USAGE();
100
101 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
102 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
103 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
104 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
105
106 IEM_MC_ADVANCE_RIP();
107 IEM_MC_END();
108 }
109 else
110 {
111 IEM_MC_BEGIN(4, 2);
112 IEM_MC_LOCAL(RTUINT128U, uSrc2);
113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
114 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
115 IEM_MC_ARG(PRTUINT128U, puDst, 1);
116 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
117 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
118
119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
120 IEMOP_HLP_DONE_VEX_DECODING();
121 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
122 IEM_MC_PREPARE_AVX_USAGE();
123
124 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
125 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
126 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
127 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
128 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
129
130 IEM_MC_ADVANCE_RIP();
131 IEM_MC_END();
132 }
133 }
134 return VINF_SUCCESS;
135}
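/*
 * Editorial note (not part of the original source): opcode handlers use this
 * common worker by passing an IEMOPMEDIAF3 implementation table providing the
 * pfnU128 and pfnU256 workers. A minimal sketch of such a dispatcher, taken
 * from the vxorps handler further down in this file (which reuses the vpxor
 * tables):
 *
 *     FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonAvx2_Vx_Hx_Wx,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
 *     }
 */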
136
137
138
139/* Opcode VEX.0F 0x00 - invalid */
140/* Opcode VEX.0F 0x01 - invalid */
141/* Opcode VEX.0F 0x02 - invalid */
142/* Opcode VEX.0F 0x03 - invalid */
143/* Opcode VEX.0F 0x04 - invalid */
144/* Opcode VEX.0F 0x05 - invalid */
145/* Opcode VEX.0F 0x06 - invalid */
146/* Opcode VEX.0F 0x07 - invalid */
147/* Opcode VEX.0F 0x08 - invalid */
148/* Opcode VEX.0F 0x09 - invalid */
149/* Opcode VEX.0F 0x0a - invalid */
150
151/** Opcode VEX.0F 0x0b. */
152FNIEMOP_DEF(iemOp_vud2)
153{
154 IEMOP_MNEMONIC(vud2, "vud2");
155 return IEMOP_RAISE_INVALID_OPCODE();
156}
157
158/* Opcode VEX.0F 0x0c - invalid */
159/* Opcode VEX.0F 0x0d - invalid */
160/* Opcode VEX.0F 0x0e - invalid */
161/* Opcode VEX.0F 0x0f - invalid */
162
163
164/**
165 * @opcode 0x10
166 * @oppfx none
167 * @opcpuid avx
168 * @opgroup og_avx_simdfp_datamove
169 * @opxcpttype 4UA
170 * @optest op1=1 op2=2 -> op1=2
171 * @optest op1=0 op2=-22 -> op1=-22
172 */
173FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
174{
175 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
176 Assert(pVCpu->iem.s.uVexLength <= 1);
177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
179 {
180 /*
181 * Register, register.
182 */
183 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
184 IEM_MC_BEGIN(0, 0);
185 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
186 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
187 if (pVCpu->iem.s.uVexLength == 0)
188 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
189 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
190 else
191 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
192 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
193 IEM_MC_ADVANCE_RIP();
194 IEM_MC_END();
195 }
196 else if (pVCpu->iem.s.uVexLength == 0)
197 {
198 /*
199 * 128-bit: Register, Memory
200 */
201 IEM_MC_BEGIN(0, 2);
202 IEM_MC_LOCAL(RTUINT128U, uSrc);
203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
204
205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
206 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
207 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
208 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
209
210 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
211 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
212
213 IEM_MC_ADVANCE_RIP();
214 IEM_MC_END();
215 }
216 else
217 {
218 /*
219 * 256-bit: Register, Memory
220 */
221 IEM_MC_BEGIN(0, 2);
222 IEM_MC_LOCAL(RTUINT256U, uSrc);
223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
224
225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
226 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
227 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
228 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
229
230 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
231 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
232
233 IEM_MC_ADVANCE_RIP();
234 IEM_MC_END();
235 }
236 return VINF_SUCCESS;
237}
238
239
240/**
241 * @opcode 0x10
242 * @oppfx 0x66
243 * @opcpuid avx
244 * @opgroup og_avx_simdfp_datamove
245 * @opxcpttype 4UA
246 * @optest op1=1 op2=2 -> op1=2
247 * @optest op1=0 op2=-22 -> op1=-22
248 */
249FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
250{
251 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
252 Assert(pVCpu->iem.s.uVexLength <= 1);
253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
255 {
256 /*
257 * Register, register.
258 */
259 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
260 IEM_MC_BEGIN(0, 0);
261 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
262 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
263 if (pVCpu->iem.s.uVexLength == 0)
264 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
265 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
266 else
267 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
268 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
269 IEM_MC_ADVANCE_RIP();
270 IEM_MC_END();
271 }
272 else if (pVCpu->iem.s.uVexLength == 0)
273 {
274 /*
275 * 128-bit: Memory, register.
276 */
277 IEM_MC_BEGIN(0, 2);
278 IEM_MC_LOCAL(RTUINT128U, uSrc);
279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
280
281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
282 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
283 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
284 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
285
286 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
287 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
288
289 IEM_MC_ADVANCE_RIP();
290 IEM_MC_END();
291 }
292 else
293 {
294 /*
295 * 256-bit: Memory, register.
296 */
297 IEM_MC_BEGIN(0, 2);
298 IEM_MC_LOCAL(RTUINT256U, uSrc);
299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
300
301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
302 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
303 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
304 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
305
306 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
307 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
308
309 IEM_MC_ADVANCE_RIP();
310 IEM_MC_END();
311 }
312 return VINF_SUCCESS;
313}
314
315
316FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
317{
318 Assert(pVCpu->iem.s.uVexLength <= 1);
319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
321 {
322 /**
323 * @opcode 0x10
324 * @oppfx 0xf3
325 * @opcodesub 11 mr/reg
326 * @opcpuid avx
327 * @opgroup og_avx_simdfp_datamerge
328 * @opxcpttype 5
329 * @optest op1=1 op2=0 op3=2 -> op1=2
330 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
331 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
332 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
333 * @note HssHi refers to bits 127:32.
334 */
335 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
336 IEMOP_HLP_DONE_VEX_DECODING();
337 IEM_MC_BEGIN(0, 0);
338
339 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
340 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
341 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
342 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
343 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
344 IEM_MC_ADVANCE_RIP();
345 IEM_MC_END();
346 }
347 else
348 {
349 /**
350 * @opdone
351 * @opcode 0x10
352 * @oppfx 0xf3
353 * @opcodesub !11 mr/reg
354 * @opcpuid avx
355 * @opgroup og_avx_simdfp_datamove
356 * @opxcpttype 5
357 * @opfunction iemOp_vmovss_Vss_Hss_Wss
358 * @optest op1=1 op2=2 -> op1=2
359 * @optest op1=0 op2=-22 -> op1=-22
360 */
361 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
362 IEM_MC_BEGIN(0, 2);
363 IEM_MC_LOCAL(uint32_t, uSrc);
364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
365
366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
367 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
368 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
369 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
370
371 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
372 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
373
374 IEM_MC_ADVANCE_RIP();
375 IEM_MC_END();
376 }
377
378 return VINF_SUCCESS;
379}
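/*
 * Editorial note (not part of the original source): for the register form
 * decoded above, VMOVSS xmm1, xmm2, xmm3 merges as follows per the
 * IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX statement:
 *     xmm1[31:0]   = xmm3[31:0]      (Uss)
 *     xmm1[127:32] = xmm2[127:32]    (HssHi, bits 127:32 as noted above)
 * with everything above bit 127 zeroed up to VLMAX; the memory form instead
 * zero-extends the loaded dword into the full destination register.
 */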
380
381
382FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
383{
384 Assert(pVCpu->iem.s.uVexLength <= 1);
385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
386 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
387 {
388 /**
389 * @opcode 0x10
390 * @oppfx 0xf2
391 * @opcodesub 11 mr/reg
392 * @opcpuid avx
393 * @opgroup og_avx_simdfp_datamerge
394 * @opxcpttype 5
395 * @optest op1=1 op2=0 op3=2 -> op1=2
396 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
397 * @optest op1=3 op2=-1 op3=0x77 ->
398 * op1=0xffffffffffffffff0000000000000077
399 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
400 */
401 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
402 IEMOP_HLP_DONE_VEX_DECODING();
403 IEM_MC_BEGIN(0, 0);
404
405 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
406 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
407 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
408 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U64*/,
409 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
410 IEM_MC_ADVANCE_RIP();
411 IEM_MC_END();
412 }
413 else
414 {
415 /**
416 * @opdone
417 * @opcode 0x10
418 * @oppfx 0xf2
419 * @opcodesub !11 mr/reg
420 * @opcpuid avx
421 * @opgroup og_avx_simdfp_datamove
422 * @opxcpttype 5
423 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
424 * @optest op1=1 op2=2 -> op1=2
425 * @optest op1=0 op2=-22 -> op1=-22
426 */
427 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
428 IEM_MC_BEGIN(0, 2);
429 IEM_MC_LOCAL(uint64_t, uSrc);
430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
431
432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
433 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
434 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
435 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
436
437 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
438 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
439
440 IEM_MC_ADVANCE_RIP();
441 IEM_MC_END();
442 }
443
444 return VINF_SUCCESS;
445}
446
447
448/**
449 * @opcode 0x11
450 * @oppfx none
451 * @opcpuid avx
452 * @opgroup og_avx_simdfp_datamove
453 * @opxcpttype 4UA
454 * @optest op1=1 op2=2 -> op1=2
455 * @optest op1=0 op2=-22 -> op1=-22
456 */
457FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
458{
459 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
460 Assert(pVCpu->iem.s.uVexLength <= 1);
461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
462 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
463 {
464 /*
465 * Register, register.
466 */
467 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
468 IEM_MC_BEGIN(0, 0);
469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
470 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
471 if (pVCpu->iem.s.uVexLength == 0)
472 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
473 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
474 else
475 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
476 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
477 IEM_MC_ADVANCE_RIP();
478 IEM_MC_END();
479 }
480 else if (pVCpu->iem.s.uVexLength == 0)
481 {
482 /*
483 * 128-bit: Memory, register.
484 */
485 IEM_MC_BEGIN(0, 2);
486 IEM_MC_LOCAL(RTUINT128U, uSrc);
487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
488
489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
490 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
491 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
492 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
493
494 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
495 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
496
497 IEM_MC_ADVANCE_RIP();
498 IEM_MC_END();
499 }
500 else
501 {
502 /*
503 * 256-bit: Memory, register.
504 */
505 IEM_MC_BEGIN(0, 2);
506 IEM_MC_LOCAL(RTUINT256U, uSrc);
507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
508
509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
510 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
512 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
513
514 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
515 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
516
517 IEM_MC_ADVANCE_RIP();
518 IEM_MC_END();
519 }
520 return VINF_SUCCESS;
521}
522
523
524/**
525 * @opcode 0x11
526 * @oppfx 0x66
527 * @opcpuid avx
528 * @opgroup og_avx_simdfp_datamove
529 * @opxcpttype 4UA
530 * @optest op1=1 op2=2 -> op1=2
531 * @optest op1=0 op2=-22 -> op1=-22
532 */
533FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
534{
535 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
536 Assert(pVCpu->iem.s.uVexLength <= 1);
537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
538 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
539 {
540 /*
541 * Register, register.
542 */
543 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
544 IEM_MC_BEGIN(0, 0);
545 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
546 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
547 if (pVCpu->iem.s.uVexLength == 0)
548 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
549 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
550 else
551 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
552 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 }
556 else if (pVCpu->iem.s.uVexLength == 0)
557 {
558 /*
559 * 128-bit: Memory, register.
560 */
561 IEM_MC_BEGIN(0, 2);
562 IEM_MC_LOCAL(RTUINT128U, uSrc);
563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
564
565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
566 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
567 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
568 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
569
570 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
571 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
572
573 IEM_MC_ADVANCE_RIP();
574 IEM_MC_END();
575 }
576 else
577 {
578 /*
579 * 256-bit: Memory, register.
580 */
581 IEM_MC_BEGIN(0, 2);
582 IEM_MC_LOCAL(RTUINT256U, uSrc);
583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
584
585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
586 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
587 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
588 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
589
590 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
591 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
592
593 IEM_MC_ADVANCE_RIP();
594 IEM_MC_END();
595 }
596 return VINF_SUCCESS;
597}
598
599
600FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
601{
602 Assert(pVCpu->iem.s.uVexLength <= 1);
603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
605 {
606 /**
607 * @opcode 0x11
608 * @oppfx 0xf3
609 * @opcodesub 11 mr/reg
610 * @opcpuid avx
611 * @opgroup og_avx_simdfp_datamerge
612 * @opxcpttype 5
613 * @optest op1=1 op2=0 op3=2 -> op1=2
614 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
615 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
616 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
617 */
618 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
619 IEMOP_HLP_DONE_VEX_DECODING();
620 IEM_MC_BEGIN(0, 0);
621
622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
624 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
625 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
626 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
627 IEM_MC_ADVANCE_RIP();
628 IEM_MC_END();
629 }
630 else
631 {
632 /**
633 * @opdone
634 * @opcode 0x11
635 * @oppfx 0xf3
636 * @opcodesub !11 mr/reg
637 * @opcpuid avx
638 * @opgroup og_avx_simdfp_datamove
639 * @opxcpttype 5
640 * @opfunction iemOp_vmovss_Wss_Hss_Vss
641 * @optest op1=1 op2=2 -> op1=2
642 * @optest op1=0 op2=-22 -> op1=-22
643 */
644 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
645 IEM_MC_BEGIN(0, 2);
646 IEM_MC_LOCAL(uint32_t, uSrc);
647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
648
649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
650 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
651 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
652 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
653
654 IEM_MC_FETCH_YREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
655 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
656
657 IEM_MC_ADVANCE_RIP();
658 IEM_MC_END();
659 }
660
661 return VINF_SUCCESS;
662}
663
664
665FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
666{
667 Assert(pVCpu->iem.s.uVexLength <= 1);
668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
669 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
670 {
671 /**
672 * @opcode 0x11
673 * @oppfx 0xf2
674 * @opcodesub 11 mr/reg
675 * @opcpuid avx
676 * @opgroup og_avx_simdfp_datamerge
677 * @opxcpttype 5
678 * @optest op1=1 op2=0 op3=2 -> op1=2
679 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
680 * @optest op1=3 op2=-1 op3=0x77 ->
681 * op1=0xffffffffffffffff0000000000000077
682 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
683 */
684 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
685 IEMOP_HLP_DONE_VEX_DECODING();
686 IEM_MC_BEGIN(0, 0);
687
688 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
689 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
690 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
691 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
692 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
693 IEM_MC_ADVANCE_RIP();
694 IEM_MC_END();
695 }
696 else
697 {
698 /**
699 * @opdone
700 * @opcode 0x11
701 * @oppfx 0xf2
702 * @opcodesub !11 mr/reg
703 * @opcpuid avx
704 * @opgroup og_avx_simdfp_datamove
705 * @opxcpttype 5
706 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
707 * @optest op1=1 op2=2 -> op1=2
708 * @optest op1=0 op2=-22 -> op1=-22
709 */
710 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
711 IEM_MC_BEGIN(0, 2);
712 IEM_MC_LOCAL(uint64_t, uSrc);
713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
714
715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
716 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
717 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
718 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
719
720 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
721 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
722
723 IEM_MC_ADVANCE_RIP();
724 IEM_MC_END();
725 }
726
727 return VINF_SUCCESS;
728}
729
730
731FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
732{
733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
734 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
735 {
736 /**
737 * @opcode 0x12
738 * @opcodesub 11 mr/reg
739 * @oppfx none
740 * @opcpuid avx
741 * @opgroup og_avx_simdfp_datamerge
742 * @opxcpttype 7LZ
743 * @optest op2=0x2200220122022203
744 * op3=0x3304330533063307
745 * -> op1=0x22002201220222033304330533063307
746 * @optest op2=-1 op3=-42 -> op1=-42
747 * @note op3 and op2 are only the 8-byte high XMM register halves.
748 */
749 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
750
751 IEMOP_HLP_DONE_VEX_DECODING_L0();
752 IEM_MC_BEGIN(0, 0);
753
754 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
755 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
756 IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
757 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
758 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
759
760 IEM_MC_ADVANCE_RIP();
761 IEM_MC_END();
762 }
763 else
764 {
765 /**
766 * @opdone
767 * @opcode 0x12
768 * @opcodesub !11 mr/reg
769 * @oppfx none
770 * @opcpuid avx
771 * @opgroup og_avx_simdfp_datamove
772 * @opxcpttype 5LZ
773 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
774 * @optest op1=1 op2=0 op3=0 -> op1=0
775 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
776 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
777 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
778 */
779 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
780
781 IEM_MC_BEGIN(0, 2);
782 IEM_MC_LOCAL(uint64_t, uSrc);
783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
784
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
786 IEMOP_HLP_DONE_VEX_DECODING_L0();
787 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
788 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
789
790 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
791 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
792 uSrc,
793 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
794
795 IEM_MC_ADVANCE_RIP();
796 IEM_MC_END();
797 }
798 return VINF_SUCCESS;
799}
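/*
 * Editorial note (not part of the original source): the two forms decoded
 * above behave as follows. Register form, VMOVHLPS xmm1, xmm2, xmm3:
 *     xmm1[63:0]   = xmm3[127:64]    (UqHi)
 *     xmm1[127:64] = xmm2[127:64]    (HqHi)
 * Memory form, VMOVLPS xmm1, xmm2, m64:
 *     xmm1[63:0]   = m64
 *     xmm1[127:64] = xmm2[127:64]
 * Both zero the destination above bit 127 up to VLMAX, matching the
 * IEM_MC_MERGE_YREG_*_ZX_VLMAX microcode statements.
 */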
800
801
802/**
803 * @opcode 0x12
804 * @opcodesub !11 mr/reg
805 * @oppfx 0x66
806 * @opcpuid avx
807 * @opgroup og_avx_pcksclr_datamerge
808 * @opxcpttype 5LZ
809 * @optest op2=0 op3=2 -> op1=2
810 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
811 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
812 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
813 */
814FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
815{
816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
817 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
818 {
819 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
820
821 IEM_MC_BEGIN(0, 2);
822 IEM_MC_LOCAL(uint64_t, uSrc);
823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
824
825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
826 IEMOP_HLP_DONE_VEX_DECODING_L0();
827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
828 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
829
830 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
831 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
832 uSrc,
833 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
834
835 IEM_MC_ADVANCE_RIP();
836 IEM_MC_END();
837 return VINF_SUCCESS;
838 }
839
840 /**
841 * @opdone
842 * @opmnemonic udvex660f12m3
843 * @opcode 0x12
844 * @opcodesub 11 mr/reg
845 * @oppfx 0x66
846 * @opunused immediate
847 * @opcpuid avx
848 * @optest ->
849 */
850 return IEMOP_RAISE_INVALID_OPCODE();
851}
852
853
854/**
855 * @opcode 0x12
856 * @oppfx 0xf3
857 * @opcpuid avx
858 * @opgroup og_avx_pcksclr_datamove
859 * @opxcpttype 4
860 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
861 * -> op1=0x00000002000000020000000100000001
862 * @optest vex.l==1 /
863 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
864 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
865 */
866FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
867{
868 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
869 Assert(pVCpu->iem.s.uVexLength <= 1);
870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
872 {
873 /*
874 * Register, register.
875 */
876 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
877 if (pVCpu->iem.s.uVexLength == 0)
878 {
879 IEM_MC_BEGIN(2, 0);
880 IEM_MC_ARG(PRTUINT128U, puDst, 0);
881 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
882
883 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
884 IEM_MC_PREPARE_AVX_USAGE();
885
886 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
887 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
888 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
889 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
890
891 IEM_MC_ADVANCE_RIP();
892 IEM_MC_END();
893 }
894 else
895 {
896 IEM_MC_BEGIN(3, 0);
897 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
898 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
899 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);
900
901 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
902 IEM_MC_PREPARE_AVX_USAGE();
903 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
904
905 IEM_MC_ADVANCE_RIP();
906 IEM_MC_END();
907 }
908 }
909 else
910 {
911 /*
912 * Register, memory.
913 */
914 if (pVCpu->iem.s.uVexLength == 0)
915 {
916 IEM_MC_BEGIN(2, 2);
917 IEM_MC_LOCAL(RTUINT128U, uSrc);
918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
919 IEM_MC_ARG(PRTUINT128U, puDst, 0);
920 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
921
922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
923 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
924 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
925 IEM_MC_PREPARE_AVX_USAGE();
926
927 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
928 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
929 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
930 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
931
932 IEM_MC_ADVANCE_RIP();
933 IEM_MC_END();
934 }
935 else
936 {
937 IEM_MC_BEGIN(3, 2);
938 IEM_MC_LOCAL(RTUINT256U, uSrc);
939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
940 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
941 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
942 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
943
944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
945 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
946 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
947 IEM_MC_PREPARE_AVX_USAGE();
948
949 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
950 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
951
952 IEM_MC_ADVANCE_RIP();
953 IEM_MC_END();
954 }
955 }
956 return VINF_SUCCESS;
957}
958
959
960/**
961 * @opcode 0x12
962 * @oppfx 0xf2
963 * @opcpuid avx
964 * @opgroup og_avx_pcksclr_datamove
965 * @opxcpttype 5
966 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
967 * -> op1=0x22222222111111112222222211111111
968 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
969 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
970 */
971FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
972{
973 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
976 {
977 /*
978 * Register, register.
979 */
980 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
981 if (pVCpu->iem.s.uVexLength == 0)
982 {
983 IEM_MC_BEGIN(2, 0);
984 IEM_MC_ARG(PRTUINT128U, puDst, 0);
985 IEM_MC_ARG(uint64_t, uSrc, 1);
986
987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
988 IEM_MC_PREPARE_AVX_USAGE();
989
990 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
991 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
992 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
993 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
994
995 IEM_MC_ADVANCE_RIP();
996 IEM_MC_END();
997 }
998 else
999 {
1000 IEM_MC_BEGIN(3, 0);
1001 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1002 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1003 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);
1004
1005 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1006 IEM_MC_PREPARE_AVX_USAGE();
1007 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1008
1009 IEM_MC_ADVANCE_RIP();
1010 IEM_MC_END();
1011 }
1012 }
1013 else
1014 {
1015 /*
1016 * Register, memory.
1017 */
1018 if (pVCpu->iem.s.uVexLength == 0)
1019 {
1020 IEM_MC_BEGIN(2, 2);
1021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1022 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1023 IEM_MC_ARG(uint64_t, uSrc, 1);
1024
1025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1026 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1027 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1028 IEM_MC_PREPARE_AVX_USAGE();
1029
1030 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1031 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1032 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1033 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1034
1035 IEM_MC_ADVANCE_RIP();
1036 IEM_MC_END();
1037 }
1038 else
1039 {
1040 IEM_MC_BEGIN(3, 2);
1041 IEM_MC_LOCAL(RTUINT256U, uSrc);
1042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1043 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1044 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1045 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1046
1047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1048 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1049 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1050 IEM_MC_PREPARE_AVX_USAGE();
1051
1052 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1053 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1054
1055 IEM_MC_ADVANCE_RIP();
1056 IEM_MC_END();
1057 }
1058 }
1059 return VINF_SUCCESS;
1060}
1061
1062
1063/**
1064 * @opcode 0x13
1065 * @opcodesub !11 mr/reg
1066 * @oppfx none
1067 * @opcpuid avx
1068 * @opgroup og_avx_simdfp_datamove
1069 * @opxcpttype 5
1070 * @optest op1=1 op2=2 -> op1=2
1071 * @optest op1=0 op2=-42 -> op1=-42
1072 */
1073FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1074{
1075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1076 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1077 {
1078 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1079
1080 IEM_MC_BEGIN(0, 2);
1081 IEM_MC_LOCAL(uint64_t, uSrc);
1082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1083
1084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1085 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1086 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1087 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1088
1089 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1090 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1091
1092 IEM_MC_ADVANCE_RIP();
1093 IEM_MC_END();
1094 return VINF_SUCCESS;
1095 }
1096
1097 /**
1098 * @opdone
1099 * @opmnemonic udvex0f13m3
1100 * @opcode 0x13
1101 * @opcodesub 11 mr/reg
1102 * @oppfx none
1103 * @opunused immediate
1104 * @opcpuid avx
1105 * @optest ->
1106 */
1107 return IEMOP_RAISE_INVALID_OPCODE();
1108}
1109
1110
1111/**
1112 * @opcode 0x13
1113 * @opcodesub !11 mr/reg
1114 * @oppfx 0x66
1115 * @opcpuid avx
1116 * @opgroup og_avx_pcksclr_datamove
1117 * @opxcpttype 5
1118 * @optest op1=1 op2=2 -> op1=2
1119 * @optest op1=0 op2=-42 -> op1=-42
1120 */
1121FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1122{
1123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1124 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1125 {
1126 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1127 IEM_MC_BEGIN(0, 2);
1128 IEM_MC_LOCAL(uint64_t, uSrc);
1129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1130
1131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1132 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1133 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1134 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1135
1136 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1137 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1138
1139 IEM_MC_ADVANCE_RIP();
1140 IEM_MC_END();
1141 return VINF_SUCCESS;
1142 }
1143
1144 /**
1145 * @opdone
1146 * @opmnemonic udvex660f13m3
1147 * @opcode 0x13
1148 * @opcodesub 11 mr/reg
1149 * @oppfx 0x66
1150 * @opunused immediate
1151 * @opcpuid avx
1152 * @optest ->
1153 */
1154 return IEMOP_RAISE_INVALID_OPCODE();
1155}
1156
1157/* Opcode VEX.F3.0F 0x13 - invalid */
1158/* Opcode VEX.F2.0F 0x13 - invalid */
1159
1160/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1161FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1162/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1163FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1164/* Opcode VEX.F3.0F 0x14 - invalid */
1165/* Opcode VEX.F2.0F 0x14 - invalid */
1166/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1167FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1168/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1169FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1170/* Opcode VEX.F3.0F 0x15 - invalid */
1171/* Opcode VEX.F2.0F 0x15 - invalid */
1172/** Opcode VEX.0F 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
1173FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1174/** Opcode VEX.66.0F 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1175FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1176/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1177FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1178/* Opcode VEX.F2.0F 0x16 - invalid */
1179/** Opcode VEX.0F 0x17 - vmovhpsv1 Mq, Vq */
1180FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1181/** Opcode VEX.66.0F 0x17 - vmovhpdv1 Mq, Vq */
1182FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1183/* Opcode VEX.F3.0F 0x17 - invalid */
1184/* Opcode VEX.F2.0F 0x17 - invalid */
1185
1186
1187/* Opcode VEX.0F 0x18 - invalid */
1188/* Opcode VEX.0F 0x19 - invalid */
1189/* Opcode VEX.0F 0x1a - invalid */
1190/* Opcode VEX.0F 0x1b - invalid */
1191/* Opcode VEX.0F 0x1c - invalid */
1192/* Opcode VEX.0F 0x1d - invalid */
1193/* Opcode VEX.0F 0x1e - invalid */
1194/* Opcode VEX.0F 0x1f - invalid */
1195
1196/* Opcode VEX.0F 0x20 - invalid */
1197/* Opcode VEX.0F 0x21 - invalid */
1198/* Opcode VEX.0F 0x22 - invalid */
1199/* Opcode VEX.0F 0x23 - invalid */
1200/* Opcode VEX.0F 0x24 - invalid */
1201/* Opcode VEX.0F 0x25 - invalid */
1202/* Opcode VEX.0F 0x26 - invalid */
1203/* Opcode VEX.0F 0x27 - invalid */
1204
1205/**
1206 * @opcode 0x28
1207 * @oppfx none
1208 * @opcpuid avx
1209 * @opgroup og_avx_pcksclr_datamove
1210 * @opxcpttype 1
1211 * @optest op1=1 op2=2 -> op1=2
1212 * @optest op1=0 op2=-42 -> op1=-42
1213 * @note Almost identical to vmovapd.
1214 */
1215FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1216{
1217 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1219 Assert(pVCpu->iem.s.uVexLength <= 1);
1220 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1221 {
1222 /*
1223 * Register, register.
1224 */
1225 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1226 IEM_MC_BEGIN(1, 0);
1227
1228 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1229 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1230 if (pVCpu->iem.s.uVexLength == 0)
1231 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1232 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1233 else
1234 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1235 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1236 IEM_MC_ADVANCE_RIP();
1237 IEM_MC_END();
1238 }
1239 else
1240 {
1241 /*
1242 * Register, memory.
1243 */
1244 if (pVCpu->iem.s.uVexLength == 0)
1245 {
1246 IEM_MC_BEGIN(0, 2);
1247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1248 IEM_MC_LOCAL(RTUINT128U, uSrc);
1249
1250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1251 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1252 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1253 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1254
1255 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1256 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1257
1258 IEM_MC_ADVANCE_RIP();
1259 IEM_MC_END();
1260 }
1261 else
1262 {
1263 IEM_MC_BEGIN(0, 2);
1264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1265 IEM_MC_LOCAL(RTUINT256U, uSrc);
1266
1267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1268 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1269 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1270 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1271
1272 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1273 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1274
1275 IEM_MC_ADVANCE_RIP();
1276 IEM_MC_END();
1277 }
1278 }
1279 return VINF_SUCCESS;
1280}
1281
1282
1283/**
1284 * @opcode 0x28
1285 * @oppfx 66
1286 * @opcpuid avx
1287 * @opgroup og_avx_pcksclr_datamove
1288 * @opxcpttype 1
1289 * @optest op1=1 op2=2 -> op1=2
1290 * @optest op1=0 op2=-42 -> op1=-42
1291 * @note Almost identical to vmovaps
1292 */
1293FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1294{
1295 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1297 Assert(pVCpu->iem.s.uVexLength <= 1);
1298 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1299 {
1300 /*
1301 * Register, register.
1302 */
1303 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1304 IEM_MC_BEGIN(1, 0);
1305
1306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1308 if (pVCpu->iem.s.uVexLength == 0)
1309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1310 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1311 else
1312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1314 IEM_MC_ADVANCE_RIP();
1315 IEM_MC_END();
1316 }
1317 else
1318 {
1319 /*
1320 * Register, memory.
1321 */
1322 if (pVCpu->iem.s.uVexLength == 0)
1323 {
1324 IEM_MC_BEGIN(0, 2);
1325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1326 IEM_MC_LOCAL(RTUINT128U, uSrc);
1327
1328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1332
1333 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1334 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1335
1336 IEM_MC_ADVANCE_RIP();
1337 IEM_MC_END();
1338 }
1339 else
1340 {
1341 IEM_MC_BEGIN(0, 2);
1342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1343 IEM_MC_LOCAL(RTUINT256U, uSrc);
1344
1345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1349
1350 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1351 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1352
1353 IEM_MC_ADVANCE_RIP();
1354 IEM_MC_END();
1355 }
1356 }
1357 return VINF_SUCCESS;
1358}
1359
1360/**
1361 * @opmnemonic udvexf30f28
1362 * @opcode 0x28
1363 * @oppfx 0xf3
1364 * @opunused vex.modrm
1365 * @opcpuid avx
1366 * @optest ->
1367 * @opdone
1368 */
1369
1370/**
1371 * @opmnemonic udvexf20f28
1372 * @opcode 0x28
1373 * @oppfx 0xf2
1374 * @opunused vex.modrm
1375 * @opcpuid avx
1376 * @optest ->
1377 * @opdone
1378 */
1379
1380/**
1381 * @opcode 0x29
1382 * @oppfx none
1383 * @opcpuid avx
1384 * @opgroup og_avx_pcksclr_datamove
1385 * @opxcpttype 1
1386 * @optest op1=1 op2=2 -> op1=2
1387 * @optest op1=0 op2=-42 -> op1=-42
1388 * @note Almost identical to vmovapd.
1389 */
1390FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1391{
1392 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1394 Assert(pVCpu->iem.s.uVexLength <= 1);
1395 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1396 {
1397 /*
1398 * Register, register.
1399 */
1400 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1401 IEM_MC_BEGIN(1, 0);
1402
1403 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1404 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1405 if (pVCpu->iem.s.uVexLength == 0)
1406 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1407 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1408 else
1409 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1410 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1411 IEM_MC_ADVANCE_RIP();
1412 IEM_MC_END();
1413 }
1414 else
1415 {
1416 /*
1417 * Register, memory.
1418 */
1419 if (pVCpu->iem.s.uVexLength == 0)
1420 {
1421 IEM_MC_BEGIN(0, 2);
1422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1423 IEM_MC_LOCAL(RTUINT128U, uSrc);
1424
1425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1426 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1428 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1429
1430 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1431 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1432
1433 IEM_MC_ADVANCE_RIP();
1434 IEM_MC_END();
1435 }
1436 else
1437 {
1438 IEM_MC_BEGIN(0, 2);
1439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1440 IEM_MC_LOCAL(RTUINT256U, uSrc);
1441
1442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1443 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1444 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1445 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1446
1447 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1448 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1449
1450 IEM_MC_ADVANCE_RIP();
1451 IEM_MC_END();
1452 }
1453 }
1454 return VINF_SUCCESS;
1455}
1456
1457/**
1458 * @opcode 0x29
1459 * @oppfx 66
1460 * @opcpuid avx
1461 * @opgroup og_avx_pcksclr_datamove
1462 * @opxcpttype 1
1463 * @optest op1=1 op2=2 -> op1=2
1464 * @optest op1=0 op2=-42 -> op1=-42
1465 * @note Almost identical to vmovaps
1466 */
1467FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1468{
1469 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1470 Assert(pVCpu->iem.s.uVexLength <= 1);
1471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1473 {
1474 /*
1475 * Register, register.
1476 */
1477 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1478 IEM_MC_BEGIN(1, 0);
1479
1480 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1481 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1482 if (pVCpu->iem.s.uVexLength == 0)
1483 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1484 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1485 else
1486 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1487 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1488 IEM_MC_ADVANCE_RIP();
1489 IEM_MC_END();
1490 }
1491 else
1492 {
1493 /*
1494 * Register, memory.
1495 */
1496 if (pVCpu->iem.s.uVexLength == 0)
1497 {
1498 IEM_MC_BEGIN(0, 2);
1499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1500 IEM_MC_LOCAL(RTUINT128U, uSrc);
1501
1502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1503 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1504 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1505 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1506
1507 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1508 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1509
1510 IEM_MC_ADVANCE_RIP();
1511 IEM_MC_END();
1512 }
1513 else
1514 {
1515 IEM_MC_BEGIN(0, 2);
1516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1517 IEM_MC_LOCAL(RTUINT256U, uSrc);
1518
1519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1520 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1521 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1522 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1523
1524 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1525 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1526
1527 IEM_MC_ADVANCE_RIP();
1528 IEM_MC_END();
1529 }
1530 }
1531 return VINF_SUCCESS;
1532}
1533
1534
1535/**
1536 * @opmnemonic udvexf30f29
1537 * @opcode 0x29
1538 * @oppfx 0xf3
1539 * @opunused vex.modrm
1540 * @opcpuid avx
1541 * @optest ->
1542 * @opdone
1543 */
1544
1545/**
1546 * @opmnemonic udvexf20f29
1547 * @opcode 0x29
1548 * @oppfx 0xf2
1549 * @opunused vex.modrm
1550 * @opcpuid avx
1551 * @optest ->
1552 * @opdone
1553 */
1554
1555
1556/** Opcode VEX.0F 0x2a - invalid */
1557/** Opcode VEX.66.0F 0x2a - invalid */
1558/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
1559FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
1560/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1561FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
1562
1563
1564/**
1565 * @opcode 0x2b
1566 * @opcodesub !11 mr/reg
1567 * @oppfx none
1568 * @opcpuid avx
1569 * @opgroup og_avx_cachect
1570 * @opxcpttype 1
1571 * @optest op1=1 op2=2 -> op1=2
1572 * @optest op1=0 op2=-42 -> op1=-42
1573 * @note Identical implementation to vmovntpd
1574 */
1575FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1576{
1577 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1578 Assert(pVCpu->iem.s.uVexLength <= 1);
1579 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1580 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1581 {
1582 /*
1583 * memory, register.
1584 */
1585 if (pVCpu->iem.s.uVexLength == 0)
1586 {
1587 IEM_MC_BEGIN(0, 2);
1588 IEM_MC_LOCAL(RTUINT128U, uSrc);
1589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1590
1591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1592 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1594 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1595
1596 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1597 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1598
1599 IEM_MC_ADVANCE_RIP();
1600 IEM_MC_END();
1601 }
1602 else
1603 {
1604 IEM_MC_BEGIN(0, 2);
1605 IEM_MC_LOCAL(RTUINT256U, uSrc);
1606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1607
1608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1609 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1610 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1611 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1612
1613 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1614 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1615
1616 IEM_MC_ADVANCE_RIP();
1617 IEM_MC_END();
1618 }
1619 }
1620 /* The register, register encoding is invalid. */
1621 else
1622 return IEMOP_RAISE_INVALID_OPCODE();
1623 return VINF_SUCCESS;
1624}
1625
1626/**
1627 * @opcode 0x2b
1628 * @opcodesub !11 mr/reg
1629 * @oppfx 0x66
1630 * @opcpuid avx
1631 * @opgroup og_avx_cachect
1632 * @opxcpttype 1
1633 * @optest op1=1 op2=2 -> op1=2
1634 * @optest op1=0 op2=-42 -> op1=-42
1635 * @note Identical implementation to vmovntps
1636 */
1637FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1638{
1639 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1640 Assert(pVCpu->iem.s.uVexLength <= 1);
1641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1642 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1643 {
1644 /*
1645 * memory, register.
1646 */
1647 if (pVCpu->iem.s.uVexLength == 0)
1648 {
1649 IEM_MC_BEGIN(0, 2);
1650 IEM_MC_LOCAL(RTUINT128U, uSrc);
1651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1652
1653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1654 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1655 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1656 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1657
1658 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1659 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1660
1661 IEM_MC_ADVANCE_RIP();
1662 IEM_MC_END();
1663 }
1664 else
1665 {
1666 IEM_MC_BEGIN(0, 2);
1667 IEM_MC_LOCAL(RTUINT256U, uSrc);
1668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1669
1670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1671 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1672 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1673 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1674
1675 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1676 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1677
1678 IEM_MC_ADVANCE_RIP();
1679 IEM_MC_END();
1680 }
1681 }
1682 /* The register, register encoding is invalid. */
1683 else
1684 return IEMOP_RAISE_INVALID_OPCODE();
1685 return VINF_SUCCESS;
1686}
1687
1688/**
1689 * @opmnemonic udvexf30f2b
1690 * @opcode 0x2b
1691 * @oppfx 0xf3
1692 * @opunused vex.modrm
1693 * @opcpuid avx
1694 * @optest ->
1695 * @opdone
1696 */
1697
1698/**
1699 * @opmnemonic udvexf20f2b
1700 * @opcode 0x2b
1701 * @oppfx 0xf2
1702 * @opunused vex.modrm
1703 * @opcpuid avx
1704 * @optest ->
1705 * @opdone
1706 */
1707
1708
1709/* Opcode VEX.0F 0x2c - invalid */
1710/* Opcode VEX.66.0F 0x2c - invalid */
1711/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
1712FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1713/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
1714FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1715
1716/* Opcode VEX.0F 0x2d - invalid */
1717/* Opcode VEX.66.0F 0x2d - invalid */
1718/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
1719FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1720/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
1721FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1722
1723/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
1724FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
1725/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
1726FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
1727/* Opcode VEX.F3.0F 0x2e - invalid */
1728/* Opcode VEX.F2.0F 0x2e - invalid */
1729
1730/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
1731FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1732/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
1733FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1734/* Opcode VEX.F3.0F 0x2f - invalid */
1735/* Opcode VEX.F2.0F 0x2f - invalid */
1736
1737/* Opcode VEX.0F 0x30 - invalid */
1738/* Opcode VEX.0F 0x31 - invalid */
1739/* Opcode VEX.0F 0x32 - invalid */
1740/* Opcode VEX.0F 0x33 - invalid */
1741/* Opcode VEX.0F 0x34 - invalid */
1742/* Opcode VEX.0F 0x35 - invalid */
1743/* Opcode VEX.0F 0x36 - invalid */
1744/* Opcode VEX.0F 0x37 - invalid */
1745/* Opcode VEX.0F 0x38 - invalid */
1746/* Opcode VEX.0F 0x39 - invalid */
1747/* Opcode VEX.0F 0x3a - invalid */
1748/* Opcode VEX.0F 0x3b - invalid */
1749/* Opcode VEX.0F 0x3c - invalid */
1750/* Opcode VEX.0F 0x3d - invalid */
1751/* Opcode VEX.0F 0x3e - invalid */
1752/* Opcode VEX.0F 0x3f - invalid */
1753/* Opcode VEX.0F 0x40 - invalid */
1754/* Opcode VEX.0F 0x41 - invalid */
1755/* Opcode VEX.0F 0x42 - invalid */
1756/* Opcode VEX.0F 0x43 - invalid */
1757/* Opcode VEX.0F 0x44 - invalid */
1758/* Opcode VEX.0F 0x45 - invalid */
1759/* Opcode VEX.0F 0x46 - invalid */
1760/* Opcode VEX.0F 0x47 - invalid */
1761/* Opcode VEX.0F 0x48 - invalid */
1762/* Opcode VEX.0F 0x49 - invalid */
1763/* Opcode VEX.0F 0x4a - invalid */
1764/* Opcode VEX.0F 0x4b - invalid */
1765/* Opcode VEX.0F 0x4c - invalid */
1766/* Opcode VEX.0F 0x4d - invalid */
1767/* Opcode VEX.0F 0x4e - invalid */
1768/* Opcode VEX.0F 0x4f - invalid */
1769
1770/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
1771FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1772/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
1773FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1774/* Opcode VEX.F3.0F 0x50 - invalid */
1775/* Opcode VEX.F2.0F 0x50 - invalid */
1776
1777/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
1778FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1779/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
1780FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1781/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
1782FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1783/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1784FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1785
1786/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
1787FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1788/* Opcode VEX.66.0F 0x52 - invalid */
1789/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
1790FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1791/* Opcode VEX.F2.0F 0x52 - invalid */
1792
1793/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
1794FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1795/* Opcode VEX.66.0F 0x53 - invalid */
1796/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
1797FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1798/* Opcode VEX.F2.0F 0x53 - invalid */
1799
1800/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
1801FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1802/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
1803FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1804/* Opcode VEX.F3.0F 0x54 - invalid */
1805/* Opcode VEX.F2.0F 0x54 - invalid */
1806
1807/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
1808FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1809/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
1810FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1811/* Opcode VEX.F3.0F 0x55 - invalid */
1812/* Opcode VEX.F2.0F 0x55 - invalid */
1813
1814/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
1815FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1816/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
1817FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1818/* Opcode VEX.F3.0F 0x56 - invalid */
1819/* Opcode VEX.F2.0F 0x56 - invalid */
1820
1821
1822/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
1823FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
1824{
1825 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
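    /* Note: vxorps (and vxorpd below) reuse the integer vpxor worker; XOR is a pure
       bitwise operation, so the result is the same regardless of how the packed
       operands are typed. */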
1826 return FNIEMOP_CALL_1(iemOpCommonAvx2_Vx_Hx_Wx,
1827 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
1828}
1829
1830
1831/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
1832FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
1833{
1834 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1835 return FNIEMOP_CALL_1(iemOpCommonAvx2_Vx_Hx_Wx,
1836 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
1837}
1838
1839
1840/* Opcode VEX.F3.0F 0x57 - invalid */
1841/* Opcode VEX.F2.0F 0x57 - invalid */
1842
1843/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
1844FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
1845/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
1846FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
1847/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
1848FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
1849/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
1850FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
1851
1852/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
1853FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
1854/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
1855FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
1856/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
1857FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
1858/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
1859FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
1860
1861/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
1862FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
1863/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
1864FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
1865/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
1866FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
1867/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
1868FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
1869
1870/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
1871FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
1872/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
1873FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
1874/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
1875FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
1876/* Opcode VEX.F2.0F 0x5b - invalid */
1877
1878/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
1879FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
1880/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
1881FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
1882/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
1883FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
1884/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
1885FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
1886
1887/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
1888FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
1889/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
1890FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
1891/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
1892FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
1893/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
1894FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
1895
1896/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
1897FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
1898/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
1899FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
1900/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
1901FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
1902/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
1903FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
1904
1905/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
1906FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
1907/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
1908FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
1909/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
1910FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
1911/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
1912FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
1913
1914
1915///**
1916// * Common worker for SSE2 instructions on the forms:
1917// * pxxxx xmm1, xmm2/mem128
1918// *
1919// * The 2nd operand is the first half of a register, which in the memory case
1920// * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
1921// * memory accessed for SSE.
1922// *
1923// * Exceptions type 4.
1924// */
1925//FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
1926//{
1927// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1928// if (!pImpl->pfnU64)
1929// return IEMOP_RAISE_INVALID_OPCODE();
1930// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1931// {
1932// /*
1933// * Register, register.
1934// */
1935// /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
1936// /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
1937// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1938// IEM_MC_BEGIN(2, 0);
1939// IEM_MC_ARG(uint64_t *, pDst, 0);
1940// IEM_MC_ARG(uint32_t const *, pSrc, 1);
1941// IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
1942// IEM_MC_PREPARE_FPU_USAGE();
1943// IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1944// IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
1945// IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
1946// IEM_MC_ADVANCE_RIP();
1947// IEM_MC_END();
1948// }
1949// else
1950// {
1951// /*
1952// * Register, memory.
1953// */
1954// IEM_MC_BEGIN(2, 2);
1955// IEM_MC_ARG(uint64_t *, pDst, 0);
1956// IEM_MC_LOCAL(uint32_t, uSrc);
1957// IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
1958// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1959//
1960// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1961// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1962// IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
1963// IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1964//
1965// IEM_MC_PREPARE_FPU_USAGE();
1966// IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1967// IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
1968//
1969// IEM_MC_ADVANCE_RIP();
1970// IEM_MC_END();
1971// }
1972// return VINF_SUCCESS;
1973//}
1974
1975
1976/* Opcode VEX.0F 0x60 - invalid */
1977
1978/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
1979FNIEMOP_STUB(iemOp_vpunpcklbw_Vx_Hx_Wx);
1980//FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
1981//{
1982// IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
1983// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
1984//}
1985
1986/* Opcode VEX.F3.0F 0x60 - invalid */
1987
1988
1989/* Opcode VEX.0F 0x61 - invalid */
1990
1991/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
1992FNIEMOP_STUB(iemOp_vpunpcklwd_Vx_Hx_Wx);
1993//FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
1994//{
1995// IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
1996// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
1997//}
1998
1999/* Opcode VEX.F3.0F 0x61 - invalid */
2000
2001
2002/* Opcode VEX.0F 0x62 - invalid */
2003
2004/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2005FNIEMOP_STUB(iemOp_vpunpckldq_Vx_Hx_Wx);
2006//FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2007//{
2008// IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2009// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2010//}
2011
2012/* Opcode VEX.F3.0F 0x62 - invalid */
2013
2014
2015
2016/* Opcode VEX.0F 0x63 - invalid */
2017/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2018FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2019/* Opcode VEX.F3.0F 0x63 - invalid */
2020
2021/* Opcode VEX.0F 0x64 - invalid */
2022/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2023FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2024/* Opcode VEX.F3.0F 0x64 - invalid */
2025
2026/* Opcode VEX.0F 0x65 - invalid */
2027/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2028FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2029/* Opcode VEX.F3.0F 0x65 - invalid */
2030
2031/* Opcode VEX.0F 0x66 - invalid */
2032/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2033FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2034/* Opcode VEX.F3.0F 0x66 - invalid */
2035
2036/* Opcode VEX.0F 0x67 - invalid */
2037/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
2038FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2039/* Opcode VEX.F3.0F 0x67 - invalid */
2040
2041
2042///**
2043// * Common worker for SSE2 instructions on the form:
2044// * pxxxx xmm1, xmm2/mem128
2045// *
2046// * The 2nd operand is the second half of a register, which in the memory case
2047// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2048// * where it may read the full 128 bits or only the upper 64 bits.
2049// *
2050// * Exceptions type 4.
2051// */
2052//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2053//{
2054// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2055// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2056// {
2057// /*
2058// * Register, register.
2059// */
2060// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2061// IEM_MC_BEGIN(2, 0);
2062// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2063// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2064// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2065// IEM_MC_PREPARE_SSE_USAGE();
2066// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2067// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2068// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2069// IEM_MC_ADVANCE_RIP();
2070// IEM_MC_END();
2071// }
2072// else
2073// {
2074// /*
2075// * Register, memory.
2076// */
2077// IEM_MC_BEGIN(2, 2);
2078// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2079// IEM_MC_LOCAL(RTUINT128U, uSrc);
2080// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2081// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2082//
2083// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2084// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2085// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2086// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
2087//
2088// IEM_MC_PREPARE_SSE_USAGE();
2089// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2090// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2091//
2092// IEM_MC_ADVANCE_RIP();
2093// IEM_MC_END();
2094// }
2095// return VINF_SUCCESS;
2096//}
2097
2098
2099/* Opcode VEX.0F 0x68 - invalid */
2100
2101/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
2102FNIEMOP_STUB(iemOp_vpunpckhbw_Vx_Hx_Wx);
2103//FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2104//{
2105// IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2106// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2107//}
2108/* Opcode VEX.F3.0F 0x68 - invalid */
2109
2110
2111/* Opcode VEX.0F 0x69 - invalid */
2112
2113/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
2114FNIEMOP_STUB(iemOp_vpunpckhwd_Vx_Hx_Wx);
2115//FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2116//{
2117// IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2118// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2119//
2120//}
2121/* Opcode VEX.F3.0F 0x69 - invalid */
2122
2123
2124/* Opcode VEX.0F 0x6a - invalid */
2125
2126/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
2127FNIEMOP_STUB(iemOp_vpunpckhdq_Vx_Hx_W);
2128//FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2129//{
2130// IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2131// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2132//}
2133/* Opcode VEX.F3.0F 0x6a - invalid */
2134
2135
2136/* Opcode VEX.0F 0x6b - invalid */
2137/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
2138FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2139/* Opcode VEX.F3.0F 0x6b - invalid */
2140
2141
2142/* Opcode VEX.0F 0x6c - invalid */
2143
2144/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
2145FNIEMOP_STUB(iemOp_vpunpcklqdq_Vx_Hx_Wx);
2146//FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2147//{
2148// IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2149// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2150//}
2151
2152/* Opcode VEX.F3.0F 0x6c - invalid */
2153/* Opcode VEX.F2.0F 0x6c - invalid */
2154
2155
2156/* Opcode VEX.0F 0x6d - invalid */
2157
2158/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */
2159FNIEMOP_STUB(iemOp_vpunpckhqdq_Vx_Hx_W);
2160//FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2161//{
2162// IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
2163// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2164//}
2165
2166/* Opcode VEX.F3.0F 0x6d - invalid */
2167
2168
2169/* Opcode VEX.0F 0x6e - invalid */
2170
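/* Opcode VEX.66.0F 0x6e - vmovd Vd, Ed / vmovq Vq, Eq; VEX.W selects the 64-bit form, see the @opcodesub docs inside. */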
2171FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2172{
2173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2175 {
2176 /**
2177 * @opcode 0x6e
2178 * @opcodesub rex.w=1
2179 * @oppfx 0x66
2180 * @opcpuid avx
2181 * @opgroup og_avx_simdint_datamov
2182 * @opxcpttype 5
2183 * @optest 64-bit / op1=1 op2=2 -> op1=2
2184 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2185 */
2186 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2188 {
2189 /* XMM, greg64 */
2190 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2191 IEM_MC_BEGIN(0, 1);
2192 IEM_MC_LOCAL(uint64_t, u64Tmp);
2193
2194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2195 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2196
2197 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2198 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2199
2200 IEM_MC_ADVANCE_RIP();
2201 IEM_MC_END();
2202 }
2203 else
2204 {
2205 /* XMM, [mem64] */
2206 IEM_MC_BEGIN(0, 2);
2207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2208 IEM_MC_LOCAL(uint64_t, u64Tmp);
2209
2210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2211 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2214
2215 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2216 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2217
2218 IEM_MC_ADVANCE_RIP();
2219 IEM_MC_END();
2220 }
2221 }
2222 else
2223 {
2224 /**
2225 * @opdone
2226 * @opcode 0x6e
2227 * @opcodesub rex.w=0
2228 * @oppfx 0x66
2229 * @opcpuid avx
2230 * @opgroup og_avx_simdint_datamov
2231 * @opxcpttype 5
2232 * @opfunction iemOp_vmovd_q_Vy_Ey
2233 * @optest op1=1 op2=2 -> op1=2
2234 * @optest op1=0 op2=-42 -> op1=-42
2235 */
2236 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2237 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2238 {
2239 /* XMM, greg32 */
2240 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2241 IEM_MC_BEGIN(0, 1);
2242 IEM_MC_LOCAL(uint32_t, u32Tmp);
2243
2244 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2245 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2246
2247 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2248 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2249
2250 IEM_MC_ADVANCE_RIP();
2251 IEM_MC_END();
2252 }
2253 else
2254 {
2255 /* XMM, [mem32] */
2256 IEM_MC_BEGIN(0, 2);
2257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2258 IEM_MC_LOCAL(uint32_t, u32Tmp);
2259
2260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2261 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2264
2265 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2266 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2267
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 }
2272 return VINF_SUCCESS;
2273}
2274
2275
2276/* Opcode VEX.F3.0F 0x6e - invalid */
2277
2278
2279/* Opcode VEX.0F 0x6f - invalid */
2280
2281/**
2282 * @opcode 0x6f
2283 * @oppfx 0x66
2284 * @opcpuid avx
2285 * @opgroup og_avx_simdint_datamove
2286 * @opxcpttype 1
2287 * @optest op1=1 op2=2 -> op1=2
2288 * @optest op1=0 op2=-42 -> op1=-42
2289 */
2290FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2291{
2292 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2293 Assert(pVCpu->iem.s.uVexLength <= 1);
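    /* uVexLength holds the VEX.L bit: 0 selects the 128-bit (XMM) paths below, 1 the 256-bit (YMM) paths. */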
2294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2295 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2296 {
2297 /*
2298 * Register, register.
2299 */
2300 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2301 IEM_MC_BEGIN(0, 0);
2302
2303 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2304 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2305 if (pVCpu->iem.s.uVexLength == 0)
2306 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2307 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2308 else
2309 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2310 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2311 IEM_MC_ADVANCE_RIP();
2312 IEM_MC_END();
2313 }
2314 else if (pVCpu->iem.s.uVexLength == 0)
2315 {
2316 /*
2317 * Register, memory128.
2318 */
2319 IEM_MC_BEGIN(0, 2);
2320 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2322
2323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2324 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2325 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2326 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2327
2328 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2329 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2330
2331 IEM_MC_ADVANCE_RIP();
2332 IEM_MC_END();
2333 }
2334 else
2335 {
2336 /*
2337 * Register, memory256.
2338 */
2339 IEM_MC_BEGIN(0, 2);
2340 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2342
2343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2344 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2345 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2346 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2347
2348 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2349 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u256Tmp);
2350
2351 IEM_MC_ADVANCE_RIP();
2352 IEM_MC_END();
2353 }
2354 return VINF_SUCCESS;
2355}
2356
2357/**
2358 * @opcode 0x6f
2359 * @oppfx 0xf3
2360 * @opcpuid avx
2361 * @opgroup og_avx_simdint_datamove
2362 * @opxcpttype 4UA
2363 * @optest op1=1 op2=2 -> op1=2
2364 * @optest op1=0 op2=-42 -> op1=-42
2365 */
2366FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2367{
2368 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
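    /* Same as vmovdqa above, except unaligned memory operands are allowed, hence the
       plain (non-_ALIGN_) fetch micro-ops in the memory paths below. */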
2369 Assert(pVCpu->iem.s.uVexLength <= 1);
2370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2371 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2372 {
2373 /*
2374 * Register, register.
2375 */
2376 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2377 IEM_MC_BEGIN(0, 0);
2378
2379 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2380 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2381 if (pVCpu->iem.s.uVexLength == 0)
2382 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2383 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2384 else
2385 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2386 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2387 IEM_MC_ADVANCE_RIP();
2388 IEM_MC_END();
2389 }
2390 else if (pVCpu->iem.s.uVexLength == 0)
2391 {
2392 /*
2393 * Register, memory128.
2394 */
2395 IEM_MC_BEGIN(0, 2);
2396 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2398
2399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2400 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2402 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2403
2404 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2405 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2406
2407 IEM_MC_ADVANCE_RIP();
2408 IEM_MC_END();
2409 }
2410 else
2411 {
2412 /*
2413 * Register, memory256.
2414 */
2415 IEM_MC_BEGIN(0, 2);
2416 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2418
2419 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2420 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2421 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2422 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2423
2424 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2425 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u256Tmp);
2426
2427 IEM_MC_ADVANCE_RIP();
2428 IEM_MC_END();
2429 }
2430 return VINF_SUCCESS;
2431}
2432
2433
2434/* Opcode VEX.0F 0x70 - invalid */
2435
2436/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
2437FNIEMOP_STUB(iemOp_vpshufd_Vx_Wx_Ib);
2438//FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2439//{
2440// IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2441// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2442// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2443// {
2444// /*
2445// * Register, register.
2446// */
2447// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2448// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2449//
2450// IEM_MC_BEGIN(3, 0);
2451// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2452// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2453// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2454// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2455// IEM_MC_PREPARE_SSE_USAGE();
2456// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2457// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2458// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2459// IEM_MC_ADVANCE_RIP();
2460// IEM_MC_END();
2461// }
2462// else
2463// {
2464// /*
2465// * Register, memory.
2466// */
2467// IEM_MC_BEGIN(3, 2);
2468// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2469// IEM_MC_LOCAL(RTUINT128U, uSrc);
2470// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2471// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2472//
2473// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2474// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2475// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2476// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2477// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2478//
2479// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2480// IEM_MC_PREPARE_SSE_USAGE();
2481// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2482// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2483//
2484// IEM_MC_ADVANCE_RIP();
2485// IEM_MC_END();
2486// }
2487// return VINF_SUCCESS;
2488//}
2489
2490/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
2491FNIEMOP_STUB(iemOp_vpshufhw_Vx_Wx_Ib);
2492//FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2493//{
2494// IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2495// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2496// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2497// {
2498// /*
2499// * Register, register.
2500// */
2501// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2502// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2503//
2504// IEM_MC_BEGIN(3, 0);
2505// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2506// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2507// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2508// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2509// IEM_MC_PREPARE_SSE_USAGE();
2510// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2511// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2512// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2513// IEM_MC_ADVANCE_RIP();
2514// IEM_MC_END();
2515// }
2516// else
2517// {
2518// /*
2519// * Register, memory.
2520// */
2521// IEM_MC_BEGIN(3, 2);
2522// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2523// IEM_MC_LOCAL(RTUINT128U, uSrc);
2524// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2525// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2526//
2527// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2528// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2529// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2530// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2531// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2532//
2533// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2534// IEM_MC_PREPARE_SSE_USAGE();
2535// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2536// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2537//
2538// IEM_MC_ADVANCE_RIP();
2539// IEM_MC_END();
2540// }
2541// return VINF_SUCCESS;
2542//}
2543
2544/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
2545FNIEMOP_STUB(iemOp_vpshuflw_Vx_Wx_Ib);
2546//FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2547//{
2548// IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2549// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2550// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2551// {
2552// /*
2553// * Register, register.
2554// */
2555// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2556// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2557//
2558// IEM_MC_BEGIN(3, 0);
2559// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2560// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2561// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2562// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2563// IEM_MC_PREPARE_SSE_USAGE();
2564// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2565// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2566// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2567// IEM_MC_ADVANCE_RIP();
2568// IEM_MC_END();
2569// }
2570// else
2571// {
2572// /*
2573// * Register, memory.
2574// */
2575// IEM_MC_BEGIN(3, 2);
2576// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2577// IEM_MC_LOCAL(RTUINT128U, uSrc);
2578// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2579// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2580//
2581// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2582// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2583// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2584// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2585// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2586//
2587// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2588// IEM_MC_PREPARE_SSE_USAGE();
2589// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2590// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2591//
2592// IEM_MC_ADVANCE_RIP();
2593// IEM_MC_END();
2594// }
2595// return VINF_SUCCESS;
2596//}
2597
2598
2599/* Opcode VEX.0F 0x71 11/2 - invalid. */
2600/** Opcode VEX.66.0F 0x71 11/2. */
2601FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2602
2603/* Opcode VEX.0F 0x71 11/4 - invalid */
2604/** Opcode VEX.66.0F 0x71 11/4. */
2605FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2606
2607/* Opcode VEX.0F 0x71 11/6 - invalid */
2608/** Opcode VEX.66.0F 0x71 11/6. */
2609FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2610
2611
2612/**
2613 * VEX Group 12 jump table for register variant.
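 * Indexed by [ModRM.reg * 4 + prefix index]; the four columns per /r row are the
 * no-prefix, 0x66, 0xF3 and 0xF2 variants (same column order as the group 15 table below).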
2614 */
2615IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
2616{
2617 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2618 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2619 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2620 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2621 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2622 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2623 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2624 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2625};
2626AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
2627
2628
2629/** Opcode VEX.0F 0x71. */
2630FNIEMOP_DEF(iemOp_VGrp12)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 /* register, register */
2635 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2636 + pVCpu->iem.s.idxPrefix], bRm);
2637 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2638}
2639
2640
2641/* Opcode VEX.0F 0x72 11/2 - invalid. */
2642/** Opcode VEX.66.0F 0x72 11/2. */
2643FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2644
2645/* Opcode VEX.0F 0x72 11/4 - invalid. */
2646/** Opcode VEX.66.0F 0x72 11/4. */
2647FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2648
2649/* Opcode VEX.0F 0x72 11/6 - invalid. */
2650/** Opcode VEX.66.0F 0x72 11/6. */
2651FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2652
2653
2654/**
2655 * VEX Group 13 jump table for register variant.
2656 */
2657IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
2658{
2659 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2660 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2661 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2662 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2663 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2664 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2665 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2666 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2667};
2668AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
2669
2670/** Opcode VEX.0F 0x72. */
2671FNIEMOP_DEF(iemOp_VGrp13)
2672{
2673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2674 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2675 /* register, register */
2676 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2677 + pVCpu->iem.s.idxPrefix], bRm);
2678 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2679}
2680
2681
2682/* Opcode VEX.0F 0x73 11/2 - invalid. */
2683/** Opcode VEX.66.0F 0x73 11/2. */
2684FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
2685
2686/** Opcode VEX.66.0F 0x73 11/3. */
2687FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
2688
2689/* Opcode VEX.0F 0x73 11/6 - invalid. */
2690/** Opcode VEX.66.0F 0x73 11/6. */
2691FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
2692
2693/** Opcode VEX.66.0F 0x73 11/7. */
2694FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
2695
2696/**
2697 * VEX Group 14 jump table for register variant.
2698 */
2699IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
2700{
2701 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2702 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2703 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2704 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2705 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2706 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2707 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2708 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2709};
2710AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
2711
2712
2713/** Opcode VEX.0F 0x73. */
2714FNIEMOP_DEF(iemOp_VGrp14)
2715{
2716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2717 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2718 /* register, register */
2719 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2720 + pVCpu->iem.s.idxPrefix], bRm);
2721 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2722}
2723
2724
2725/* Opcode VEX.0F 0x74 - invalid */
2726
2727/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
2728FNIEMOP_STUB(iemOp_vpcmpeqb_Vx_Hx_Wx);
2729//FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
2730//{
2731// IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
2732// return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
2733//}
2734
2735/* Opcode VEX.F3.0F 0x74 - invalid */
2736/* Opcode VEX.F2.0F 0x74 - invalid */
2737
2738
2739/* Opcode VEX.0F 0x75 - invalid */
2740
2741/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
2742FNIEMOP_STUB(iemOp_vpcmpeqw_Vx_Hx_Wx);
2743//FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
2744//{
2745// IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
2746// return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
2747//}
2748
2749/* Opcode VEX.F3.0F 0x75 - invalid */
2750/* Opcode VEX.F2.0F 0x75 - invalid */
2751
2752
2753/* Opcode VEX.0F 0x76 - invalid */
2754
2755/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
2756FNIEMOP_STUB(iemOp_vpcmpeqd_Vx_Hx_Wx);
2757//FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
2758//{
2759// IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
2760// return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
2761//}
2762
2763/* Opcode VEX.F3.0F 0x76 - invalid */
2764/* Opcode VEX.F2.0F 0x76 - invalid */
2765
2766
2767/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
2768FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
2769/* Opcode VEX.66.0F 0x77 - invalid */
2770/* Opcode VEX.F3.0F 0x77 - invalid */
2771/* Opcode VEX.F2.0F 0x77 - invalid */
2772
2773/* Opcode VEX.0F 0x78 - invalid */
2774/* Opcode VEX.66.0F 0x78 - invalid */
2775/* Opcode VEX.F3.0F 0x78 - invalid */
2776/* Opcode VEX.F2.0F 0x78 - invalid */
2777
2778/* Opcode VEX.0F 0x79 - invalid */
2779/* Opcode VEX.66.0F 0x79 - invalid */
2780/* Opcode VEX.F3.0F 0x79 - invalid */
2781/* Opcode VEX.F2.0F 0x79 - invalid */
2782
2783/* Opcode VEX.0F 0x7a - invalid */
2784/* Opcode VEX.66.0F 0x7a - invalid */
2785/* Opcode VEX.F3.0F 0x7a - invalid */
2786/* Opcode VEX.F2.0F 0x7a - invalid */
2787
2788/* Opcode VEX.0F 0x7b - invalid */
2789/* Opcode VEX.66.0F 0x7b - invalid */
2790/* Opcode VEX.F3.0F 0x7b - invalid */
2791/* Opcode VEX.F2.0F 0x7b - invalid */
2792
2793/* Opcode VEX.0F 0x7c - invalid */
2794/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
2795FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
2796/* Opcode VEX.F3.0F 0x7c - invalid */
2797/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
2798FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
2799
2800/* Opcode VEX.0F 0x7d - invalid */
2801/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
2802FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
2803/* Opcode VEX.F3.0F 0x7d - invalid */
2804/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
2805FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
2806
2807
2808/* Opcode VEX.0F 0x7e - invalid */
2809
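/* Opcode VEX.66.0F 0x7e - vmovd Ed, Vd / vmovq Eq, Vq; VEX.W selects the 64-bit form, see the @opcodesub docs inside. */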
2810FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
2811{
2812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2813 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2814 {
2815 /**
2816 * @opcode 0x7e
2817 * @opcodesub rex.w=1
2818 * @oppfx 0x66
2819 * @opcpuid avx
2820 * @opgroup og_avx_simdint_datamov
2821 * @opxcpttype 5
2822 * @optest 64-bit / op1=1 op2=2 -> op1=2
2823 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2824 */
2825 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2827 {
2828 /* greg64, XMM */
2829 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2830 IEM_MC_BEGIN(0, 1);
2831 IEM_MC_LOCAL(uint64_t, u64Tmp);
2832
2833 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2834 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2835
2836 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2837 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
2838
2839 IEM_MC_ADVANCE_RIP();
2840 IEM_MC_END();
2841 }
2842 else
2843 {
2844 /* [mem64], XMM */
2845 IEM_MC_BEGIN(0, 2);
2846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2847 IEM_MC_LOCAL(uint64_t, u64Tmp);
2848
2849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2850 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2851 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2852 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2853
2854 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2855 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
2856
2857 IEM_MC_ADVANCE_RIP();
2858 IEM_MC_END();
2859 }
2860 }
2861 else
2862 {
2863 /**
2864 * @opdone
2865 * @opcode 0x7e
2866 * @opcodesub rex.w=0
2867 * @oppfx 0x66
2868 * @opcpuid avx
2869 * @opgroup og_avx_simdint_datamov
2870 * @opxcpttype 5
2871 * @opfunction iemOp_vmovd_q_Ey_Vy
2872 * @optest op1=1 op2=2 -> op1=2
2873 * @optest op1=0 op2=-42 -> op1=-42
2874 */
2875 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2876 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2877 {
2878 /* greg32, XMM */
2879 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2880 IEM_MC_BEGIN(0, 1);
2881 IEM_MC_LOCAL(uint32_t, u32Tmp);
2882
2883 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2884 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2885
2886 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2887 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
2888
2889 IEM_MC_ADVANCE_RIP();
2890 IEM_MC_END();
2891 }
2892 else
2893 {
2894 /* [mem32], XMM */
2895 IEM_MC_BEGIN(0, 2);
2896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2897 IEM_MC_LOCAL(uint32_t, u32Tmp);
2898
2899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2900 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2901 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2902 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2903
2904 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2905 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
2906
2907 IEM_MC_ADVANCE_RIP();
2908 IEM_MC_END();
2909 }
2910 }
2911 return VINF_SUCCESS;
2912}
2913
2914/**
2915 * @opcode 0x7e
2916 * @oppfx 0xf3
2917 * @opcpuid avx
2918 * @opgroup og_avx_pcksclr_datamove
2919 * @opxcpttype none
2920 * @optest op1=1 op2=2 -> op1=2
2921 * @optest op1=0 op2=-42 -> op1=-42
2922 */
2923FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
2924{
2925 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
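    /* Copies or loads a single qword into the low half of the destination and zeroes
       the rest of the register up to VLMAX (the _ZX_VLMAX micro-ops below). */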
2926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2927 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2928 {
2929 /*
2930 * Register, register.
2931 */
2932 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2933 IEM_MC_BEGIN(0, 0);
2934
2935 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2936 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2937
2938 IEM_MC_COPY_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2939 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2940 IEM_MC_ADVANCE_RIP();
2941 IEM_MC_END();
2942 }
2943 else
2944 {
2945 /*
2946 * Register, memory.
2947 */
2948 IEM_MC_BEGIN(0, 2);
2949 IEM_MC_LOCAL(uint64_t, uSrc);
2950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2951
2952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2953 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2954 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2955 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2956
2957 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2958 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2959
2960 IEM_MC_ADVANCE_RIP();
2961 IEM_MC_END();
2962 }
2963 return VINF_SUCCESS;
2964
2965}
2966/* Opcode VEX.F2.0F 0x7e - invalid */
2967
2968
2969/* Opcode VEX.0F 0x7f - invalid */
2970
2971/**
2972 * @opcode 0x7f
2973 * @oppfx 0x66
2974 * @opcpuid avx
2975 * @opgroup og_avx_simdint_datamove
2976 * @opxcpttype 1
2977 * @optest op1=1 op2=2 -> op1=2
2978 * @optest op1=0 op2=-42 -> op1=-42
2979 */
2980FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
2981{
2982 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2983 Assert(pVCpu->iem.s.uVexLength <= 1);
2984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2985 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2986 {
2987 /*
2988 * Register, register.
2989 */
2990 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2991 IEM_MC_BEGIN(0, 0);
2992
2993 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2994 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2995 if (pVCpu->iem.s.uVexLength == 0)
2996 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2997 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2998 else
2999 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3000 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3001 IEM_MC_ADVANCE_RIP();
3002 IEM_MC_END();
3003 }
3004 else if (pVCpu->iem.s.uVexLength == 0)
3005 {
3006 /*
3007 * Register, memory128.
3008 */
3009 IEM_MC_BEGIN(0, 2);
3010 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3012
3013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3014 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3015 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3016 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3017
3018 IEM_MC_FETCH_YREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3019 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3020
3021 IEM_MC_ADVANCE_RIP();
3022 IEM_MC_END();
3023 }
3024 else
3025 {
3026 /*
3027 * Register, memory256.
3028 */
3029 IEM_MC_BEGIN(0, 2);
3030 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3032
3033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3034 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3035 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3036 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3037
3038 IEM_MC_FETCH_YREG_U256(u256Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3039 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3040
3041 IEM_MC_ADVANCE_RIP();
3042 IEM_MC_END();
3043 }
3044 return VINF_SUCCESS;
3045}
3046
3047/**
3048 * @opcode 0x7f
3049 * @oppfx 0xf3
3050 * @opcpuid avx
3051 * @opgroup og_avx_simdint_datamove
3052 * @opxcpttype 4UA
3053 * @optest op1=1 op2=2 -> op1=2
3054 * @optest op1=0 op2=-42 -> op1=-42
3055 */
3056FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3057{
3058 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3059 Assert(pVCpu->iem.s.uVexLength <= 1);
3060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3062 {
3063 /*
3064 * Register, register.
3065 */
3066 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3067 IEM_MC_BEGIN(0, 0);
3068
3069 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3070 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3071 if (pVCpu->iem.s.uVexLength == 0)
3072 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3073 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3074 else
3075 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3076 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3077 IEM_MC_ADVANCE_RIP();
3078 IEM_MC_END();
3079 }
3080 else if (pVCpu->iem.s.uVexLength == 0)
3081 {
3082 /*
3083 * Register, memory128.
3084 */
3085 IEM_MC_BEGIN(0, 2);
3086 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3088
3089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3090 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3091 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3092 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3093
3094 IEM_MC_FETCH_YREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3095 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3096
3097 IEM_MC_ADVANCE_RIP();
3098 IEM_MC_END();
3099 }
3100 else
3101 {
3102 /*
3103 * Register, memory256.
3104 */
3105 IEM_MC_BEGIN(0, 2);
3106 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3108
3109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3110 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3111 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3112 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3113
3114 IEM_MC_FETCH_YREG_U256(u256Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3115 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3116
3117 IEM_MC_ADVANCE_RIP();
3118 IEM_MC_END();
3119 }
3120 return VINF_SUCCESS;
3121}
3122
3123/* Opcode VEX.F2.0F 0x7f - invalid */
3124
3125
3126/* Opcode VEX.0F 0x80 - invalid */
3127/* Opcode VEX.0F 0x81 - invalid */
3128/* Opcode VEX.0F 0x82 - invalid */
3129/* Opcode VEX.0F 0x83 - invalid */
3130/* Opcode VEX.0F 0x84 - invalid */
3131/* Opcode VEX.0F 0x85 - invalid */
3132/* Opcode VEX.0F 0x86 - invalid */
3133/* Opcode VEX.0F 0x87 - invalid */
3134/* Opcode VEX.0F 0x88 - invalid */
3135/* Opcode VEX.0F 0x89 - invalid */
3136/* Opcode VEX.0F 0x8a - invalid */
3137/* Opcode VEX.0F 0x8b - invalid */
3138/* Opcode VEX.0F 0x8c - invalid */
3139/* Opcode VEX.0F 0x8d - invalid */
3140/* Opcode VEX.0F 0x8e - invalid */
3141/* Opcode VEX.0F 0x8f - invalid */
3142/* Opcode VEX.0F 0x90 - invalid */
3143/* Opcode VEX.0F 0x91 - invalid */
3144/* Opcode VEX.0F 0x92 - invalid */
3145/* Opcode VEX.0F 0x93 - invalid */
3146/* Opcode VEX.0F 0x94 - invalid */
3147/* Opcode VEX.0F 0x95 - invalid */
3148/* Opcode VEX.0F 0x96 - invalid */
3149/* Opcode VEX.0F 0x97 - invalid */
3150/* Opcode VEX.0F 0x98 - invalid */
3151/* Opcode VEX.0F 0x99 - invalid */
3152/* Opcode VEX.0F 0x9a - invalid */
3153/* Opcode VEX.0F 0x9b - invalid */
3154/* Opcode VEX.0F 0x9c - invalid */
3155/* Opcode VEX.0F 0x9d - invalid */
3156/* Opcode VEX.0F 0x9e - invalid */
3157/* Opcode VEX.0F 0x9f - invalid */
3158/* Opcode VEX.0F 0xa0 - invalid */
3159/* Opcode VEX.0F 0xa1 - invalid */
3160/* Opcode VEX.0F 0xa2 - invalid */
3161/* Opcode VEX.0F 0xa3 - invalid */
3162/* Opcode VEX.0F 0xa4 - invalid */
3163/* Opcode VEX.0F 0xa5 - invalid */
3164/* Opcode VEX.0F 0xa6 - invalid */
3165/* Opcode VEX.0F 0xa7 - invalid */
3166/* Opcode VEX.0F 0xa8 - invalid */
3167/* Opcode VEX.0F 0xa9 - invalid */
3168/* Opcode VEX.0F 0xaa - invalid */
3169/* Opcode VEX.0F 0xab - invalid */
3170/* Opcode VEX.0F 0xac - invalid */
3171/* Opcode VEX.0F 0xad - invalid */
3172
3173
3174/* Opcode VEX.0F 0xae mem/0 - invalid. */
3175/* Opcode VEX.0F 0xae mem/1 - invalid. */
3176
3177/**
3178 * @ opmaps grp15
3179 * @ opcode !11/2
3180 * @ oppfx none
3181 * @ opcpuid sse
3182 * @ opgroup og_sse_mxcsrsm
3183 * @ opxcpttype 5
3184 * @ optest op1=0 -> mxcsr=0
3185 * @ optest op1=0x2083 -> mxcsr=0x2083
3186 * @ optest op1=0xfffffffe -> value.xcpt=0xd
3187 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
3188 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
3189 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
3190 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
3191 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
3192 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3193 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3194 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3195 */
3196FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
3197//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
3198//{
3199// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3200// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
3201// return IEMOP_RAISE_INVALID_OPCODE();
3202//
3203// IEM_MC_BEGIN(2, 0);
3204// IEM_MC_ARG(uint8_t, iEffSeg, 0);
3205// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3206// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3207// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3208// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3209// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3210// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
3211// IEM_MC_END();
3212// return VINF_SUCCESS;
3213//}
3214
3215
3216/**
3217 * @opmaps vexgrp15
3218 * @opcode !11/3
3219 * @oppfx none
3220 * @opcpuid avx
3221 * @opgroup og_avx_mxcsrsm
3222 * @opxcpttype 5
3223 * @optest mxcsr=0 -> op1=0
3224 * @optest mxcsr=0x2083 -> op1=0x2083
3225 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
3226 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
3227 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
3228 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
3229 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
3230 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
3231 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
3232 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
3233 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
3234 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
3235 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3236 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
3237 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3238 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
3239 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3240 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
3241 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
3242 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
3243 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
3244 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
3245 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
3246 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
3247 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
3248 * -> value.xcpt=0x6
3249 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
3250 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
3251 * APMv4 rev 3.17 page 509.
3252 * @todo Test this instruction on AMD Ryzen.
3253 */
3254FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
3255{
3256 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
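    /* MXCSR lives in the SSE portion of the guest state, so only that part needs to be
       up to date before iemCImpl_vstmxcsr reads it. */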
3257 IEM_MC_BEGIN(2, 0);
3258 IEM_MC_ARG(uint8_t, iEffSeg, 0);
3259 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3261 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3262 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3263 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3264 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
3265 IEM_MC_END();
3266 return VINF_SUCCESS;
3267}
3268
3269/* Opcode VEX.0F 0xae mem/4 - invalid. */
3270/* Opcode VEX.0F 0xae mem/5 - invalid. */
3271/* Opcode VEX.0F 0xae mem/6 - invalid. */
3272/* Opcode VEX.0F 0xae mem/7 - invalid. */
3273
3274/* Opcode VEX.0F 0xae 11b/0 - invalid. */
3275/* Opcode VEX.0F 0xae 11b/1 - invalid. */
3276/* Opcode VEX.0F 0xae 11b/2 - invalid. */
3277/* Opcode VEX.0F 0xae 11b/3 - invalid. */
3278/* Opcode VEX.0F 0xae 11b/4 - invalid. */
3279/* Opcode VEX.0F 0xae 11b/5 - invalid. */
3280/* Opcode VEX.0F 0xae 11b/6 - invalid. */
3281/* Opcode VEX.0F 0xae 11b/7 - invalid. */
3282
3283/**
3284 * Vex group 15 jump table for memory variant.
3285 */
3286IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
3287{ /* pfx: none, 066h, 0f3h, 0f2h */
3288 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3289 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3290 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3291 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3292 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3293 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3294 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3295 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3296};
3297AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
3298
3299
3300/** Opcode VEX.0F 0xae. */
3301FNIEMOP_DEF(iemOp_VGrp15)
3302{
3303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3305 /* register, register */
3306 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
3307
3308 /* memory, register */
3309 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3310 + pVCpu->iem.s.idxPrefix], bRm);
3311}
3312
3313
3314/* Opcode VEX.0F 0xaf - invalid. */
3315
3316/* Opcode VEX.0F 0xb0 - invalid. */
3317/* Opcode VEX.0F 0xb1 - invalid. */
3318/* Opcode VEX.0F 0xb2 - invalid. */
3320/* Opcode VEX.0F 0xb3 - invalid. */
3321/* Opcode VEX.0F 0xb4 - invalid. */
3322/* Opcode VEX.0F 0xb5 - invalid. */
3323/* Opcode VEX.0F 0xb6 - invalid. */
3324/* Opcode VEX.0F 0xb7 - invalid. */
3325/* Opcode VEX.0F 0xb8 - invalid. */
3326/* Opcode VEX.0F 0xb9 - invalid. */
3327/* Opcode VEX.0F 0xba - invalid. */
3328/* Opcode VEX.0F 0xbb - invalid. */
3329/* Opcode VEX.0F 0xbc - invalid. */
3330/* Opcode VEX.0F 0xbd - invalid. */
3331/* Opcode VEX.0F 0xbe - invalid. */
3332/* Opcode VEX.0F 0xbf - invalid. */
3333
3334/* Opcode VEX.0F 0xc0 - invalid. */
3335/* Opcode VEX.66.0F 0xc0 - invalid. */
3336/* Opcode VEX.F3.0F 0xc0 - invalid. */
3337/* Opcode VEX.F2.0F 0xc0 - invalid. */
3338
3339/* Opcode VEX.0F 0xc1 - invalid. */
3340/* Opcode VEX.66.0F 0xc1 - invalid. */
3341/* Opcode VEX.F3.0F 0xc1 - invalid. */
3342/* Opcode VEX.F2.0F 0xc1 - invalid. */
3343
3344/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
3345FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
3346/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
3347FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
3348/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
3349FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
3350/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
3351FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
3352
3353/* Opcode VEX.0F 0xc3 - invalid */
3354/* Opcode VEX.66.0F 0xc3 - invalid */
3355/* Opcode VEX.F3.0F 0xc3 - invalid */
3356/* Opcode VEX.F2.0F 0xc3 - invalid */
3357
3358/* Opcode VEX.0F 0xc4 - invalid */
3359/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
3360FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
3361/* Opcode VEX.F3.0F 0xc4 - invalid */
3362/* Opcode VEX.F2.0F 0xc4 - invalid */
3363
3364/* Opcode VEX.0F 0xc5 - invalid */
3365/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
3366FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
3367/* Opcode VEX.F3.0F 0xc5 - invalid */
3368/* Opcode VEX.F2.0F 0xc5 - invalid */
3369
3370/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
3371FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
3372/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
3373FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
3374/* Opcode VEX.F3.0F 0xc6 - invalid */
3375/* Opcode VEX.F2.0F 0xc6 - invalid */
3376
3377/* Opcode VEX.0F 0xc7 - invalid */
3378/* Opcode VEX.66.0F 0xc7 - invalid */
3379/* Opcode VEX.F3.0F 0xc7 - invalid */
3380/* Opcode VEX.F2.0F 0xc7 - invalid */
3381
3382/* Opcode VEX.0F 0xc8 - invalid */
3383/* Opcode VEX.0F 0xc9 - invalid */
3384/* Opcode VEX.0F 0xca - invalid */
3385/* Opcode VEX.0F 0xcb - invalid */
3386/* Opcode VEX.0F 0xcc - invalid */
3387/* Opcode VEX.0F 0xcd - invalid */
3388/* Opcode VEX.0F 0xce - invalid */
3389/* Opcode VEX.0F 0xcf - invalid */
3390
3391
3392/* Opcode VEX.0F 0xd0 - invalid */
3393/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
3394FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
3395/* Opcode VEX.F3.0F 0xd0 - invalid */
3396/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
3397FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
3398
3399/* Opcode VEX.0F 0xd1 - invalid */
3400/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
3401FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
3402/* Opcode VEX.F3.0F 0xd1 - invalid */
3403/* Opcode VEX.F2.0F 0xd1 - invalid */
3404
3405/* Opcode VEX.0F 0xd2 - invalid */
3406/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
3407FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
3408/* Opcode VEX.F3.0F 0xd2 - invalid */
3409/* Opcode VEX.F2.0F 0xd2 - invalid */
3410
3411/* Opcode VEX.0F 0xd3 - invalid */
3412/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
3413FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
3414/* Opcode VEX.F3.0F 0xd3 - invalid */
3415/* Opcode VEX.F2.0F 0xd3 - invalid */
3416
3417/* Opcode VEX.0F 0xd4 - invalid */
3418/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, W */
3419FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
3420/* Opcode VEX.F3.0F 0xd4 - invalid */
3421/* Opcode VEX.F2.0F 0xd4 - invalid */
3422
3423/* Opcode VEX.0F 0xd5 - invalid */
3424/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
3425FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
3426/* Opcode VEX.F3.0F 0xd5 - invalid */
3427/* Opcode VEX.F2.0F 0xd5 - invalid */
3428
3429/* Opcode VEX.0F 0xd6 - invalid */
3430
3431/**
3432 * @opcode 0xd6
3433 * @oppfx 0x66
3434 * @opcpuid avx
3435 * @opgroup og_avx_pcksclr_datamove
3436 * @opxcpttype none
3437 * @optest op1=-1 op2=2 -> op1=2
3438 * @optest op1=0 op2=-42 -> op1=-42
3439 */
3440FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
3441{
3442 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3444 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3445 {
3446 /*
3447 * Register, register.
3448 */
3449 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3450 IEM_MC_BEGIN(0, 0);
3451
3452 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3453 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3454
3455 IEM_MC_COPY_YREG_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3456 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3457 IEM_MC_ADVANCE_RIP();
3458 IEM_MC_END();
3459 }
3460 else
3461 {
3462 /*
3463 * Memory, register.
3464 */
3465 IEM_MC_BEGIN(0, 2);
3466 IEM_MC_LOCAL(uint64_t, uSrc);
3467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3468
3469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3470 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3471 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3472 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3473
3474 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3475 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3476
3477 IEM_MC_ADVANCE_RIP();
3478 IEM_MC_END();
3479 }
3480 return VINF_SUCCESS;
3481}
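
/*
 * Semantics sketch (illustration only, not the emulation path): for the
 * register form the low quadword is copied and the destination is zero
 * extended to the full vector width, mirroring IEM_MC_COPY_YREG_U64_ZX_VLMAX
 * above.  Assuming a 256-bit VLMAX and illustrative RTUINT256U pointers
 * puDst/puSrc:
 *
 *     puDst->au64[0] = puSrc->au64[0];
 *     puDst->au64[1] = 0;
 *     puDst->au64[2] = 0;
 *     puDst->au64[3] = 0;
 *
 * The memory form simply stores the low 64 bits of the source register.
 */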
3482
3483/* Opcode VEX.F3.0F 0xd6 - invalid */
3484/* Opcode VEX.F2.0F 0xd6 - invalid */
3485
3486
3487/* Opcode VEX.0F 0xd7 - invalid */
3488
3489/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
3490FNIEMOP_STUB(iemOp_vpmovmskb_Gd_Ux);
3491//FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
3492//{
3493// /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
3494// /** @todo testcase: Check that the instruction implicitly clears the high
3495// * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
3496// * and opcode modifications are made to work with the whole width (not
3497// * just 128). */
3498// IEMOP_MNEMONIC(vpmovmskb_Gd_Nq, "vpmovmskb Gd, Ux");
3499// /* Docs says register only. */
3500// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3501// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
3502// {
3503// IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
3504// IEM_MC_BEGIN(2, 0);
3505// IEM_MC_ARG(uint64_t *, pDst, 0);
3506// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3507// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3508// IEM_MC_PREPARE_SSE_USAGE();
3509// IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3510// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3511// IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
3512// IEM_MC_ADVANCE_RIP();
3513// IEM_MC_END();
3514// return VINF_SUCCESS;
3515// }
3516// return IEMOP_RAISE_INVALID_OPCODE();
3517//}
3518
3519/* Opcode VEX.F3.0F 0xd7 - invalid */
3520/* Opcode VEX.F2.0F 0xd7 - invalid */
3521
3522
3523/* Opcode VEX.0F 0xd8 - invalid */
3524/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
3525FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
3526/* Opcode VEX.F3.0F 0xd8 - invalid */
3527/* Opcode VEX.F2.0F 0xd8 - invalid */
3528
3529/* Opcode VEX.0F 0xd9 - invalid */
3530/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
3531FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
3532/* Opcode VEX.F3.0F 0xd9 - invalid */
3533/* Opcode VEX.F2.0F 0xd9 - invalid */
3534
3535/* Opcode VEX.0F 0xda - invalid */
3536/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
3537FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
3538/* Opcode VEX.F3.0F 0xda - invalid */
3539/* Opcode VEX.F2.0F 0xda - invalid */
3540
3541/* Opcode VEX.0F 0xdb - invalid */
3542/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, W */
3543FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
3544/* Opcode VEX.F3.0F 0xdb - invalid */
3545/* Opcode VEX.F2.0F 0xdb - invalid */
3546
3547/* Opcode VEX.0F 0xdc - invalid */
3548/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
3549FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
3550/* Opcode VEX.F3.0F 0xdc - invalid */
3551/* Opcode VEX.F2.0F 0xdc - invalid */
3552
3553/* Opcode VEX.0F 0xdd - invalid */
3554/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
3555FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
3556/* Opcode VEX.F3.0F 0xdd - invalid */
3557/* Opcode VEX.F2.0F 0xdd - invalid */
3558
3559/* Opcode VEX.0F 0xde - invalid */
3560/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, W */
3561FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
3562/* Opcode VEX.F3.0F 0xde - invalid */
3563/* Opcode VEX.F2.0F 0xde - invalid */
3564
3565/* Opcode VEX.0F 0xdf - invalid */
3566/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
3567FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
3568/* Opcode VEX.F3.0F 0xdf - invalid */
3569/* Opcode VEX.F2.0F 0xdf - invalid */
3570
3571/* Opcode VEX.0F 0xe0 - invalid */
3572/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
3573FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
3574/* Opcode VEX.F3.0F 0xe0 - invalid */
3575/* Opcode VEX.F2.0F 0xe0 - invalid */
3576
3577/* Opcode VEX.0F 0xe1 - invalid */
3578/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
3579FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
3580/* Opcode VEX.F3.0F 0xe1 - invalid */
3581/* Opcode VEX.F2.0F 0xe1 - invalid */
3582
3583/* Opcode VEX.0F 0xe2 - invalid */
3584/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
3585FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
3586/* Opcode VEX.F3.0F 0xe2 - invalid */
3587/* Opcode VEX.F2.0F 0xe2 - invalid */
3588
3589/* Opcode VEX.0F 0xe3 - invalid */
3590/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
3591FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
3592/* Opcode VEX.F3.0F 0xe3 - invalid */
3593/* Opcode VEX.F2.0F 0xe3 - invalid */
3594
3595/* Opcode VEX.0F 0xe4 - invalid */
3596/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, W */
3597FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
3598/* Opcode VEX.F3.0F 0xe4 - invalid */
3599/* Opcode VEX.F2.0F 0xe4 - invalid */
3600
3601/* Opcode VEX.0F 0xe5 - invalid */
3602/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
3603FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
3604/* Opcode VEX.F3.0F 0xe5 - invalid */
3605/* Opcode VEX.F2.0F 0xe5 - invalid */
3606
3607/* Opcode VEX.0F 0xe6 - invalid */
3608/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
3609FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
3610/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
3611FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
3612/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
3613FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
3614
3615
3616/* Opcode VEX.0F 0xe7 - invalid */
3617
3618/**
3619 * @opcode 0xe7
3620 * @opcodesub !11 mr/reg
3621 * @oppfx 0x66
3622 * @opcpuid avx
3623 * @opgroup og_avx_cachect
3624 * @opxcpttype 1
3625 * @optest op1=-1 op2=2 -> op1=2
3626 * @optest op1=0 op2=-42 -> op1=-42
3627 */
3628FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
3629{
3630 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3631 Assert(pVCpu->iem.s.uVexLength <= 1);
3632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3633 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3634 {
3635 if (pVCpu->iem.s.uVexLength == 0)
3636 {
3637 /*
3638 * 128-bit: Memory, register.
3639 */
3640 IEM_MC_BEGIN(0, 2);
3641 IEM_MC_LOCAL(RTUINT128U, uSrc);
3642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3643
3644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3645 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3646 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3647 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3648
3649 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3650 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3651
3652 IEM_MC_ADVANCE_RIP();
3653 IEM_MC_END();
3654 }
3655 else
3656 {
3657 /*
3658 * 256-bit: Memory, register.
3659 */
3660 IEM_MC_BEGIN(0, 2);
3661 IEM_MC_LOCAL(RTUINT256U, uSrc);
3662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3663
3664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3665 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3666 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3667 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3668
3669 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3670 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3671
3672 IEM_MC_ADVANCE_RIP();
3673 IEM_MC_END();
3674 }
3675 return VINF_SUCCESS;
3676 }
3677 /**
3678 * @opdone
3679 * @opmnemonic udvex660fe7reg
3680 * @opcode 0xe7
3681 * @opcodesub 11 mr/reg
3682 * @oppfx 0x66
3683 * @opunused immediate
3684 * @opcpuid avx
3685 * @optest ->
3686 */
3687 return IEMOP_RAISE_INVALID_OPCODE();
3688}
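
/*
 * Alignment note (illustrative): the _ALIGN_ store helpers used above require
 * natural operand alignment, i.e. a 16-byte aligned effective address for the
 * 128-bit form and a 32-byte aligned one for the 256-bit form; a misaligned
 * non-temporal store faults rather than being performed.  Conceptually:
 * if (GCPtrEffSrc & (cbOperand - 1)) then raise a general protection fault,
 * where cbOperand is 16 or 32 depending on VEX.L.
 */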
3689
3690/* Opcode VEX.F3.0F 0xe7 - invalid */
3691/* Opcode VEX.F2.0F 0xe7 - invalid */
3692
3693
3694/* Opcode VEX.0F 0xe8 - invalid */
3695/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
3696FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
3697/* Opcode VEX.F3.0F 0xe8 - invalid */
3698/* Opcode VEX.F2.0F 0xe8 - invalid */
3699
3700/* Opcode VEX.0F 0xe9 - invalid */
3701/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
3702FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
3703/* Opcode VEX.F3.0F 0xe9 - invalid */
3704/* Opcode VEX.F2.0F 0xe9 - invalid */
3705
3706/* Opcode VEX.0F 0xea - invalid */
3707/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
3708FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
3709/* Opcode VEX.F3.0F 0xea - invalid */
3710/* Opcode VEX.F2.0F 0xea - invalid */
3711
3712/* Opcode VEX.0F 0xeb - invalid */
3713/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, W */
3714FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
3715/* Opcode VEX.F3.0F 0xeb - invalid */
3716/* Opcode VEX.F2.0F 0xeb - invalid */
3717
3718/* Opcode VEX.0F 0xec - invalid */
3719/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
3720FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
3721/* Opcode VEX.F3.0F 0xec - invalid */
3722/* Opcode VEX.F2.0F 0xec - invalid */
3723
3724/* Opcode VEX.0F 0xed - invalid */
3725/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
3726FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
3727/* Opcode VEX.F3.0F 0xed - invalid */
3728/* Opcode VEX.F2.0F 0xed - invalid */
3729
3730/* Opcode VEX.0F 0xee - invalid */
3731/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, W */
3732FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
3733/* Opcode VEX.F3.0F 0xee - invalid */
3734/* Opcode VEX.F2.0F 0xee - invalid */
3735
3736
3737/* Opcode VEX.0F 0xef - invalid */
3738
3739/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
3740FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
3741{
3742 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3743 return FNIEMOP_CALL_1(iemOpCommonAvx2_Vx_Hx_Wx,
3744 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3745}
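
/*
 * Operation sketch (illustration only; not the actual g_iemAImpl_vpxor or
 * fallback body): VPXOR is a plain lane-wise exclusive or of the two source
 * operands, e.g. for the 128-bit case handled by the pfnU128 worker and
 * illustrative RTUINT128U pointers:
 *
 *     puDst->au64[0] = puSrc1->au64[0] ^ puSrc2->au64[0];
 *     puDst->au64[1] = puSrc1->au64[1] ^ puSrc2->au64[1];
 *
 * The VEX.256 form does the same over all four quadwords, while the VEX.128
 * form additionally zeroes the upper lanes of the destination YMM register
 * (VLMAX zero extension).
 */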
3746
3747/* Opcode VEX.F3.0F 0xef - invalid */
3748/* Opcode VEX.F2.0F 0xef - invalid */
3749
3750/* Opcode VEX.0F 0xf0 - invalid */
3751/* Opcode VEX.66.0F 0xf0 - invalid */
3752/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
3753FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
3754
3755/* Opcode VEX.0F 0xf1 - invalid */
3756/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
3757FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
3758/* Opcode VEX.F2.0F 0xf1 - invalid */
3759
3760/* Opcode VEX.0F 0xf2 - invalid */
3761/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
3762FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
3763/* Opcode VEX.F2.0F 0xf2 - invalid */
3764
3765/* Opcode VEX.0F 0xf3 - invalid */
3766/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
3767FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
3768/* Opcode VEX.F2.0F 0xf3 - invalid */
3769
3770/* Opcode VEX.0F 0xf4 - invalid */
3771/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
3772FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
3773/* Opcode VEX.F2.0F 0xf4 - invalid */
3774
3775/* Opcode VEX.0F 0xf5 - invalid */
3776/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
3777FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
3778/* Opcode VEX.F2.0F 0xf5 - invalid */
3779
3780/* Opcode VEX.0F 0xf6 - invalid */
3781/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
3782FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
3783/* Opcode VEX.F2.0F 0xf6 - invalid */
3784
3785/* Opcode VEX.0F 0xf7 - invalid */
3786/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
3787FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
3788/* Opcode VEX.F2.0F 0xf7 - invalid */
3789
3790/* Opcode VEX.0F 0xf8 - invalid */
3791/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, W */
3792FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
3793/* Opcode VEX.F2.0F 0xf8 - invalid */
3794
3795/* Opcode VEX.0F 0xf9 - invalid */
3796/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
3797FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
3798/* Opcode VEX.F2.0F 0xf9 - invalid */
3799
3800/* Opcode VEX.0F 0xfa - invalid */
3801/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
3802FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
3803/* Opcode VEX.F2.0F 0xfa - invalid */
3804
3805/* Opcode VEX.0F 0xfb - invalid */
3806/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, W */
3807FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
3808/* Opcode VEX.F2.0F 0xfb - invalid */
3809
3810/* Opcode VEX.0F 0xfc - invalid */
3811/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
3812FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
3813/* Opcode VEX.F2.0F 0xfc - invalid */
3814
3815/* Opcode VEX.0F 0xfd - invalid */
3816/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
3817FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
3818/* Opcode VEX.F2.0F 0xfd - invalid */
3819
3820/* Opcode VEX.0F 0xfe - invalid */
3821/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, W */
3822FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
3823/* Opcode VEX.F2.0F 0xfe - invalid */
3824
3825
3826/** Opcode **** 0x0f 0xff - UD0 */
3827FNIEMOP_DEF(iemOp_vud0)
3828{
3829 IEMOP_MNEMONIC(vud0, "vud0");
3830 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
3831 {
3832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
3833#ifndef TST_IEM_CHECK_MC
3834 RTGCPTR GCPtrEff;
3835 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
3836 if (rcStrict != VINF_SUCCESS)
3837 return rcStrict;
3838#endif
3839 IEMOP_HLP_DONE_DECODING();
3840 }
3841 return IEMOP_RAISE_INVALID_OPCODE();
3842}
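
/*
 * Decode note: following Intel behaviour, UD0 consumes a ModR/M byte and, for
 * memory forms, the full effective address encoding before #UD is raised,
 * which is why the effective address is calculated above even though the
 * instruction is always invalid.  For other vendors the handler raises #UD
 * without consuming the ModR/M byte.
 */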
3843
3844
3845
3846/**
3847 * VEX opcode map \#1.
3848 *
3849 * @sa g_apfnTwoByteMap
3850 */
3851IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
3852{
3853 /* no prefix, 066h prefix f3h prefix, f2h prefix */
3854 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
3855 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
3856 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
3857 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
3858 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
3859 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
3860 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
3861 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
3862 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
3863 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
3864 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
3865 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
3866 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
3867 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
3868 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
3869 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
3870
3871 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
3872 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
3873 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
3874 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3875 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3876 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3877 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
3878 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3879 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
3880 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
3881 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
3882 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
3883 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
3884 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
3885 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
3886 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
3887
3888 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
3889 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
3890 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
3891 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
3892 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
3893 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
3894 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
3895 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
3896 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3897 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3898 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
3899 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3900 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
3901 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
3902 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3903 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3904
3905 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
3906 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
3907 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
3908 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
3909 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
3910 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
3911 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
3912 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
3913 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3914 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3915 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3916 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3917 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3918 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3919 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3920 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3921
3922 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
3923 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
3924 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
3925 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
3926 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
3927 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
3928 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
3929 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
3930 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
3931 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
3932 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
3933 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
3934 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
3935 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
3936 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
3937 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
3938
3939 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3940 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
3941 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
3942 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
3943 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3944 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3945 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3946 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3947 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
3948 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
3949 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
3950 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
3951 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
3952 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
3953 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
3954 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
3955
3956 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3957 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3958 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3959 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3960 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3961 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3962 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3963 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3964 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3965 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3966 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3967 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3968 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3969 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3970 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3971 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
3972
3973 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
3974 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3975 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3976 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3977 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3978 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3979 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3980 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3981 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
3982 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
3983 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
3984 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
3985 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
3986 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
3987 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
3988 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
3989
3990 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
3991 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
3992 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
3993 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
3994 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
3995 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
3996 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
3997 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
3998 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
3999 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
4000 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
4001 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
4002 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
4003 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
4004 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
4005 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
4006
4007 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
4008 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
4009 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
4010 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
4011 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
4012 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
4013 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
4014 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
4015 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
4016 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
4017 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
4018 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
4019 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
4020 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
4021 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
4022 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
4023
4024 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4025 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4026 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4027 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4028 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4029 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4030 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4031 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4032 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4033 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4034 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
4035 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
4036 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
4037 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
4038 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
4039 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
4040
4041 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4042 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4043 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4044 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4045 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4046 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4047 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4048 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4049 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4050 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4051 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
4052 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
4053 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
4054 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
4055 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
4056 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
4057
4058 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4059 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4060 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
4061 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4062 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4063 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4064 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
4065 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4066 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4067 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4068 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
4069 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
4070 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
4071 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
4072 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
4073 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
4074
4075 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
4076 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4077 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4078 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4079 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4080 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4081 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4082 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4083 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4084 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4085 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4086 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4087 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4088 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4089 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4090 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4091
4092 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4093 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4094 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4095 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4096 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4097 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4098 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
4099 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4100 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4101 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4102 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4103 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4104 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4105 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4106 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4107 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4108
4109 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
4110 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4111 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4112 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4113 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4114 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4115 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4116 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4117 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4118 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4119 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4120 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4121 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4122 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4123 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4124 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
4125};
4126AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
4127/** @} */
4128