VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@103182

Last change on this file since 103182 was 103182, checked in by vboxsync, 14 months ago

VMM/IEM: Implement vpsll[wdq] 'reg/mem' instruction dispatch & emulation
VMM/IEM: Implement vpsll[wdq] 'imm8' instruction decode, dispatch & emulation

Note: some known test failures in IEM (unexpected #AC for 128/256 'mem' instructions)

VMM/IEM: instruction extractor: add VMX_VMI* tokens for VEX_VM inst w/imm byte

ValidationKit/bootsectors: Improve vpsra[wd], vpsrl[wdq] test tables; emulations not implemented yet
ValidationKit/bootsectors: Remove mistaken g_aXcptConfig4psll[] from bs3-cpu-instr-3.c32

bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 216.7 KB
 
/* $Id: IEMAllInstVexMap1.cpp.h 103182 2024-02-03 15:44:12Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
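
/*
 * Editor's note (not part of the upstream file): a minimal sketch of the kind
 * of body the pImpl->pfnU128 table entry invoked above might have, assuming a
 * hypothetical byte-wise operation.  The real table entries additionally take
 * the implicit state argument added by IEM_MC_IMPLICIT_AVX_AIMPL_ARGS(),
 * which is omitted here; the function name and operation are made up.
 */
#if 0 /* illustrative sketch only, not built */
static void iemAImpl_vpexample_u128_sketch(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    RTUINT128U uResult; /* compute into a local since puDst may alias a source */
    for (unsigned i = 0; i < RT_ELEMENTS(uResult.au8); i++)
        uResult.au8[i] = puSrc1->au8[i] + puSrc2->au8[i]; /* hypothetical per-byte op */
    *puDst = uResult;
}
#endif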


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}
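
/*
 * Editor's note (not part of the upstream file): the low/high distinction the
 * two wrappers above document, sketched for byte-granularity unpacking of one
 * 128-bit lane.  Names are made up; only the element shuffle is the point.
 */
#if 0 /* illustrative sketch only, not built */
static void vpunpcklbw_u128_sketch(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    RTUINT128U uResult;
    for (unsigned i = 0; i < 8; i++)
    {
        uResult.au8[2 * i]     = puSrc1->au8[i]; /* even slots: low half of src1 */
        uResult.au8[2 * i + 1] = puSrc2->au8[i]; /* odd slots:  low half of src2 */
    }
    *puDst = uResult;
    /* The 'high' variant reads au8[8 + i] instead, which is why its 128-bit
       memory form may skip fetching the lower 64 bits of the operand. */
}
#endif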


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1/mem128
 *     - vpxxx    ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
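
/*
 * Editor's note (not part of the upstream file): the _ZX_VLMAX stores used
 * above implement the VEX rule that writing a 128-bit result zeroes bits
 * VLMAX-1:128 of the destination YMM register, in contrast to legacy SSE,
 * which leaves them untouched.  A rough model, assuming a plain two-halves
 * view of a YMM register:
 */
#if 0 /* illustrative sketch only, not built */
typedef struct YMMSKETCH { RTUINT128U uLo, uHi; } YMMSKETCH;
static void StoreU128ZxVlMaxSketch(YMMSKETCH *pYmm, PCRTUINT128U puSrc)
{
    pYmm->uLo      = *puSrc; /* bits 127:0 receive the result */
    pYmm->uHi.s.Lo = 0;      /* bits 255:128 are zeroed */
    pYmm->uHi.s.Hi = 0;
}
#endif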


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note        HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Vss_Hss_Wss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
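
/*
 * Editor's note (not part of the upstream file): the register form above is a
 * merge, not a move: bits 31:0 come from the second (U/rm) source, bits 127:32
 * from the VEX.vvvv register, and bits VLMAX-1:128 are zeroed.  Sketched:
 */
#if 0 /* illustrative sketch only, not built */
static void MergeU32U96Sketch(PRTUINT128U puDst, PCRTUINT128U puUss, PCRTUINT128U puHss)
{
    RTUINT128U uResult;
    uResult.au32[0] = puUss->au32[0]; /* low dword from rm */
    uResult.au32[1] = puHss->au32[1]; /* upper 96 bits from vvvv */
    uResult.au32[2] = puHss->au32[2];
    uResult.au32[3] = puHss->au32[3];
    *puDst = uResult;                 /* the MC above additionally zeroes bits VLMAX-1:128 */
}
#endif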


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Wss_Hss_Vss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  7LZ
         * @optest      op2=0x2200220122022203
         *              op3=0x3304330533063307
         *              -> op1=0x22002201220222033304330533063307
         * @optest      op2=-1 op3=-42 -> op1=-42
         * @note        op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5LZ
         * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest      op1=1 op2=0 op3=0 -> op1=0
         * @optest      op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest      op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest      op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
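
/*
 * Editor's note (not part of the upstream file): the two forms above differ in
 * where the low qword comes from; roughly:
 *     vmovhlps: dst[63:0] = rm[127:64],  dst[127:64] = vvvv[127:64]
 *     vmovlps:  dst[63:0] = mem64,       dst[127:64] = vvvv[127:64]
 * with bits VLMAX-1:128 zeroed in both cases.
 */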


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamerge
 * @opxcpttype  5LZ
 * @optest      op2=0 op3=2 -> op1=2
 * @optest      op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest      op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *              -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  4
 * @optest      vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *              -> op1=0x00000002000000020000000100000001
 * @optest      vex.l==1 /
 *              op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *              -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
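
/*
 * Editor's note (not part of the upstream file): the four U32 stores above
 * spell out the "duplicate the even dwords" pattern of vmovsldup; for a
 * source x3:x2:x1:x0 the 128-bit result is x2:x2:x0:x0.  Sketched:
 */
#if 0 /* illustrative sketch only, not built */
static void vmovsldup_u128_sketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    RTUINT128U uResult;
    uResult.au32[0] = puSrc->au32[0];
    uResult.au32[1] = puSrc->au32[0]; /* dword 0 duplicated */
    uResult.au32[2] = puSrc->au32[2];
    uResult.au32[3] = puSrc->au32[2]; /* dword 2 duplicated */
    *puDst = uResult;
}
#endif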


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  5
 * @optest      vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *              -> op1=0x22222222111111112222222211111111
 * @optest      vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *              -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
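
/*
 * Editor's note (not part of the upstream file): vmovddup duplicates qword 0
 * in the 128-bit form; with VEX.256 each 128-bit lane duplicates its own even
 * qword, which is what the _256_ AIMPL workers called above provide.  Sketched
 * for the 256-bit case over the IPRT RTUINT256U union:
 */
#if 0 /* illustrative sketch only, not built */
static void vmovddup_u256_sketch(PRTUINT256U puDst, PCRTUINT256U puSrc)
{
    RTUINT256U uResult;
    uResult.au64[0] = puSrc->au64[0];
    uResult.au64[1] = puSrc->au64[0]; /* low lane:  qword 0 duplicated */
    uResult.au64[2] = puSrc->au64[2];
    uResult.au64[3] = puSrc->au64[2]; /* high lane: qword 2 duplicated */
    *puDst = uResult;
}
#endif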


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */


FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5LZ
         * @opfunction  iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamerge
 * @opxcpttype  5LZ
 */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  4
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.F2.0F 0x16 - invalid */


/**
 * @opcode      0x17
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  5
 */
FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode      0x17
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  5
 */
FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode VEX.F3.0F 0x17 - invalid */
/* Opcode VEX.F2.0F 0x17 - invalid */


/* Opcode VEX.0F 0x18 - invalid */
/* Opcode VEX.0F 0x19 - invalid */
/* Opcode VEX.0F 0x1a - invalid */
/* Opcode VEX.0F 0x1b - invalid */
/* Opcode VEX.0F 0x1c - invalid */
/* Opcode VEX.0F 0x1d - invalid */
/* Opcode VEX.0F 0x1e - invalid */
/* Opcode VEX.0F 0x1f - invalid */

/* Opcode VEX.0F 0x20 - invalid */
/* Opcode VEX.0F 0x21 - invalid */
/* Opcode VEX.0F 0x22 - invalid */
/* Opcode VEX.0F 0x23 - invalid */
/* Opcode VEX.0F 0x24 - invalid */
/* Opcode VEX.0F 0x25 - invalid */
/* Opcode VEX.0F 0x26 - invalid */
/* Opcode VEX.0F 0x27 - invalid */

/**
 * @opcode      0x28
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 * @note        Almost identical to vmovapd.
 */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1833 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1834
1835 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1836 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1837
1838 IEM_MC_ADVANCE_RIP_AND_FINISH();
1839 IEM_MC_END();
1840 }
1841 }
1842}
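/*
 * Reminder (from the SDM, not derived from this code): every VEX.128
 * encoded move zeroes destination bits 255:128, e.g.
 *      vmovaps xmm1, xmm2    ; also clears ymm1[255:128]
 * while legacy 'movaps xmm1, xmm2' leaves those bits untouched. That is
 * what the _ZX_VLMAX suffix on the copy/store helpers above implements.
 */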
1843
1844
1845/**
1846 * @opcode 0x28
1847 * @oppfx 66
1848 * @opcpuid avx
1849 * @opgroup og_avx_pcksclr_datamove
1850 * @opxcpttype 1
1851 * @optest op1=1 op2=2 -> op1=2
1852 * @optest op1=0 op2=-42 -> op1=-42
1853 * @note Almost identical to vmovaps
1854 */
1855FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1856{
1857 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1859 Assert(pVCpu->iem.s.uVexLength <= 1);
1860 if (IEM_IS_MODRM_REG_MODE(bRm))
1861 {
1862 /*
1863 * Register, register.
1864 */
1865 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1866 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1867
1868 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1869 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1870 if (pVCpu->iem.s.uVexLength == 0)
1871 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1872 IEM_GET_MODRM_RM(pVCpu, bRm));
1873 else
1874 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1875 IEM_GET_MODRM_RM(pVCpu, bRm));
1876 IEM_MC_ADVANCE_RIP_AND_FINISH();
1877 IEM_MC_END();
1878 }
1879 else
1880 {
1881 /*
1882 * Register, memory.
1883 */
1884 if (pVCpu->iem.s.uVexLength == 0)
1885 {
1886 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1888 IEM_MC_LOCAL(RTUINT128U, uSrc);
1889
1890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1891 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1892 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1893 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1894
1895 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1896 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1897
1898 IEM_MC_ADVANCE_RIP_AND_FINISH();
1899 IEM_MC_END();
1900 }
1901 else
1902 {
1903 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1905 IEM_MC_LOCAL(RTUINT256U, uSrc);
1906
1907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1908 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1909 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1910 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1911
1912 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1913 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1914
1915 IEM_MC_ADVANCE_RIP_AND_FINISH();
1916 IEM_MC_END();
1917 }
1918 }
1919}
1920
1921/**
1922 * @opmnemonic udvexf30f28
1923 * @opcode 0x28
1924 * @oppfx 0xf3
1925 * @opunused vex.modrm
1926 * @opcpuid avx
1927 * @optest ->
1928 * @opdone
1929 */
1930
1931/**
1932 * @opmnemonic udvexf20f28
1933 * @opcode 0x28
1934 * @oppfx 0xf2
1935 * @opunused vex.modrm
1936 * @opcpuid avx
1937 * @optest ->
1938 * @opdone
1939 */
1940
1941/**
1942 * @opcode 0x29
1943 * @oppfx none
1944 * @opcpuid avx
1945 * @opgroup og_avx_pcksclr_datamove
1946 * @opxcpttype 1
1947 * @optest op1=1 op2=2 -> op1=2
1948 * @optest op1=0 op2=-42 -> op1=-42
1949 * @note Almost identical to vmovapd.
1950 */
1951FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1952{
1953 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1955 Assert(pVCpu->iem.s.uVexLength <= 1);
1956 if (IEM_IS_MODRM_REG_MODE(bRm))
1957 {
1958 /*
1959 * Register, register.
1960 */
1961 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1962 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1963
1964 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1965 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1966 if (pVCpu->iem.s.uVexLength == 0)
1967 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1968 IEM_GET_MODRM_REG(pVCpu, bRm));
1969 else
1970 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1971 IEM_GET_MODRM_REG(pVCpu, bRm));
1972 IEM_MC_ADVANCE_RIP_AND_FINISH();
1973 IEM_MC_END();
1974 }
1975 else
1976 {
1977 /*
1978 * Register, memory.
1979 */
1980 if (pVCpu->iem.s.uVexLength == 0)
1981 {
1982 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984 IEM_MC_LOCAL(RTUINT128U, uSrc);
1985
1986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1987 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1988 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1989 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1990
1991 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1992 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1993
1994 IEM_MC_ADVANCE_RIP_AND_FINISH();
1995 IEM_MC_END();
1996 }
1997 else
1998 {
1999 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2001 IEM_MC_LOCAL(RTUINT256U, uSrc);
2002
2003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2004 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2005 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2007
2008 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2009 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP_AND_FINISH();
2012 IEM_MC_END();
2013 }
2014 }
2015}
2016
2017/**
2018 * @opcode 0x29
2019 * @oppfx 66
2020 * @opcpuid avx
2021 * @opgroup og_avx_pcksclr_datamove
2022 * @opxcpttype 1
2023 * @optest op1=1 op2=2 -> op1=2
2024 * @optest op1=0 op2=-42 -> op1=-42
2025 * @note Almost identical to vmovaps
2026 */
2027FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2028{
2029 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2030 Assert(pVCpu->iem.s.uVexLength <= 1);
2031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2032 if (IEM_IS_MODRM_REG_MODE(bRm))
2033 {
2034 /*
2035 * Register, register.
2036 */
2037 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
2038 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2039
2040 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2041 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2042 if (pVCpu->iem.s.uVexLength == 0)
2043 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2044 IEM_GET_MODRM_REG(pVCpu, bRm));
2045 else
2046 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2047 IEM_GET_MODRM_REG(pVCpu, bRm));
2048 IEM_MC_ADVANCE_RIP_AND_FINISH();
2049 IEM_MC_END();
2050 }
2051 else
2052 {
2053 /*
2054 * Register, memory.
2055 */
2056 if (pVCpu->iem.s.uVexLength == 0)
2057 {
2058 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2060 IEM_MC_LOCAL(RTUINT128U, uSrc);
2061
2062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2066
2067 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2068 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2069
2070 IEM_MC_ADVANCE_RIP_AND_FINISH();
2071 IEM_MC_END();
2072 }
2073 else
2074 {
2075 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT256U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2083
2084 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2085 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 }
2091}
2092
2093
2094/**
2095 * @opmnemonic udvexf30f29
2096 * @opcode 0x29
2097 * @oppfx 0xf3
2098 * @opunused vex.modrm
2099 * @opcpuid avx
2100 * @optest ->
2101 * @opdone
2102 */
2103
2104/**
2105 * @opmnemonic udvexf20f29
2106 * @opcode 0x29
2107 * @oppfx 0xf2
2108 * @opunused vex.modrm
2109 * @opcpuid avx
2110 * @optest ->
2111 * @opdone
2112 */
2113
2114
2115/** Opcode VEX.0F 0x2a - invalid */
2116/** Opcode VEX.66.0F 0x2a - invalid */
2117/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2118FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2119/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2120FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2121
2122
2123/**
2124 * @opcode 0x2b
2125 * @opcodesub !11 mr/reg
2126 * @oppfx none
2127 * @opcpuid avx
2128 * @opgroup og_avx_cachect
2129 * @opxcpttype 1
2130 * @optest op1=1 op2=2 -> op1=2
2131 * @optest op1=0 op2=-42 -> op1=-42
2132 * @note Identical implementation to vmovntpd
2133 */
2134FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2135{
2136 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2137 Assert(pVCpu->iem.s.uVexLength <= 1);
2138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2139 if (IEM_IS_MODRM_MEM_MODE(bRm))
2140 {
2141 /*
2142 * Memory, register.
2143 */
2144 if (pVCpu->iem.s.uVexLength == 0)
2145 {
2146 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2147 IEM_MC_LOCAL(RTUINT128U, uSrc);
2148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2149
2150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2151 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2152 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2153 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2154
2155 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2156 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2157
2158 IEM_MC_ADVANCE_RIP_AND_FINISH();
2159 IEM_MC_END();
2160 }
2161 else
2162 {
2163 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2164 IEM_MC_LOCAL(RTUINT256U, uSrc);
2165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2166
2167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2168 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2170 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2171
2172 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2173 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2174
2175 IEM_MC_ADVANCE_RIP_AND_FINISH();
2176 IEM_MC_END();
2177 }
2178 }
2179 /* The register, register encoding is invalid. */
2180 else
2181 IEMOP_RAISE_INVALID_OPCODE_RET();
2182}
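/*
 * Note: vmovntps is a non-temporal store hint; emulating it as a plain
 * aligned store, as above, is architecturally acceptable since the NT
 * property is only a caching hint. The mod=11 register form has no
 * defined meaning, hence the #UD path.
 */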
2183
2184/**
2185 * @opcode 0x2b
2186 * @opcodesub !11 mr/reg
2187 * @oppfx 0x66
2188 * @opcpuid avx
2189 * @opgroup og_avx_cachect
2190 * @opxcpttype 1
2191 * @optest op1=1 op2=2 -> op1=2
2192 * @optest op1=0 op2=-42 -> op1=-42
2193 * @note Identical implementation to vmovntps
2194 */
2195FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2196{
2197 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2198 Assert(pVCpu->iem.s.uVexLength <= 1);
2199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2200 if (IEM_IS_MODRM_MEM_MODE(bRm))
2201 {
2202 /*
2203 * Memory, register.
2204 */
2205 if (pVCpu->iem.s.uVexLength == 0)
2206 {
2207 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2208 IEM_MC_LOCAL(RTUINT128U, uSrc);
2209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2210
2211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2212 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2213 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2214 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2215
2216 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2217 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2218
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 }
2222 else
2223 {
2224 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2225 IEM_MC_LOCAL(RTUINT256U, uSrc);
2226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2227
2228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2229 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232
2233 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2234 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2235
2236 IEM_MC_ADVANCE_RIP_AND_FINISH();
2237 IEM_MC_END();
2238 }
2239 }
2240 /* The register, register encoding is invalid. */
2241 else
2242 IEMOP_RAISE_INVALID_OPCODE_RET();
2243}
2244
2245/**
2246 * @opmnemonic udvexf30f2b
2247 * @opcode 0x2b
2248 * @oppfx 0xf3
2249 * @opunused vex.modrm
2250 * @opcpuid avx
2251 * @optest ->
2252 * @opdone
2253 */
2254
2255/**
2256 * @opmnemonic udvexf20f2b
2257 * @opcode 0x2b
2258 * @oppfx 0xf2
2259 * @opunused vex.modrm
2260 * @opcpuid avx
2261 * @optest ->
2262 * @opdone
2263 */
2264
2265
2266/* Opcode VEX.0F 0x2c - invalid */
2267/* Opcode VEX.66.0F 0x2c - invalid */
2268/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2269FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2270/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2271FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2272
2273/* Opcode VEX.0F 0x2d - invalid */
2274/* Opcode VEX.66.0F 0x2d - invalid */
2275/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2276FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2277/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2278FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2279
2280
2281/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2282FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2283{
2284 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2286 if (IEM_IS_MODRM_REG_MODE(bRm))
2287 {
2288 /*
2289 * Register, register.
2290 */
2291 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2292 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2293 IEM_MC_LOCAL(uint32_t, fEFlags);
2294 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2295 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2296 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2297 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2298 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2299 IEM_MC_PREPARE_AVX_USAGE();
2300 IEM_MC_FETCH_EFLAGS(fEFlags);
2301 IEM_MC_REF_MXCSR(pfMxcsr);
2302 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2303 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2304 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2305 pfMxcsr, pEFlags, puSrc1, puSrc2);
2306 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2307 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2308 } IEM_MC_ELSE() {
2309 IEM_MC_COMMIT_EFLAGS(fEFlags);
2310 } IEM_MC_ENDIF();
2311
2312 IEM_MC_ADVANCE_RIP_AND_FINISH();
2313 IEM_MC_END();
2314 }
2315 else
2316 {
2317 /*
2318 * Register, memory.
2319 */
2320 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2321 IEM_MC_LOCAL(uint32_t, fEFlags);
2322 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2323 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2324 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2325 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2326 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2328
2329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2330 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2331 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2332 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2333
2334 IEM_MC_PREPARE_AVX_USAGE();
2335 IEM_MC_FETCH_EFLAGS(fEFlags);
2336 IEM_MC_REF_MXCSR(pfMxcsr);
2337 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2338 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2339 pfMxcsr, pEFlags, puSrc1, puSrc2);
2340 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2341 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2342 } IEM_MC_ELSE() {
2343 IEM_MC_COMMIT_EFLAGS(fEFlags);
2344 } IEM_MC_ENDIF();
2345
2346 IEM_MC_ADVANCE_RIP_AND_FINISH();
2347 IEM_MC_END();
2348 }
2349}
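/*
 * For reference, the EFLAGS result vucomiss is expected to produce (per
 * the SDM; the mapping lives in the assembly/fallback helper, not here):
 *      unordered (NaN) -> ZF=1 PF=1 CF=1
 *      greater         -> ZF=0 PF=0 CF=0
 *      less            -> ZF=0 PF=0 CF=1
 *      equal           -> ZF=1 PF=0 CF=0
 * with OF/SF/AF cleared; the MC blocks above only commit fEFlags when no
 * unmasked SIMD FP exception is left pending in MXCSR.
 */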
2350
2351
2352/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2353FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2354{
2355 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2357 if (IEM_IS_MODRM_REG_MODE(bRm))
2358 {
2359 /*
2360 * Register, register.
2361 */
2362 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2363 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2364 IEM_MC_LOCAL(uint32_t, fEFlags);
2365 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2366 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2367 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2368 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2370 IEM_MC_PREPARE_AVX_USAGE();
2371 IEM_MC_FETCH_EFLAGS(fEFlags);
2372 IEM_MC_REF_MXCSR(pfMxcsr);
2373 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2374 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2375 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2376 pfMxcsr, pEFlags, puSrc1, puSrc2);
2377 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2378 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2379 } IEM_MC_ELSE() {
2380 IEM_MC_COMMIT_EFLAGS(fEFlags);
2381 } IEM_MC_ENDIF();
2382
2383 IEM_MC_ADVANCE_RIP_AND_FINISH();
2384 IEM_MC_END();
2385 }
2386 else
2387 {
2388 /*
2389 * Register, memory.
2390 */
2391 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2392 IEM_MC_LOCAL(uint32_t, fEFlags);
2393 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2394 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2395 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2396 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2397 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2399
2400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2401 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2402 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2403 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2404
2405 IEM_MC_PREPARE_AVX_USAGE();
2406 IEM_MC_FETCH_EFLAGS(fEFlags);
2407 IEM_MC_REF_MXCSR(pfMxcsr);
2408 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2409 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2410 pfMxcsr, pEFlags, puSrc1, puSrc2);
2411 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2412 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2413 } IEM_MC_ELSE() {
2414 IEM_MC_COMMIT_EFLAGS(fEFlags);
2415 } IEM_MC_ENDIF();
2416
2417 IEM_MC_ADVANCE_RIP_AND_FINISH();
2418 IEM_MC_END();
2419 }
2420}
2421
2422
2423/* Opcode VEX.F3.0F 0x2e - invalid */
2424/* Opcode VEX.F2.0F 0x2e - invalid */
2425
2426/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2427FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2428{
2429 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2431 if (IEM_IS_MODRM_REG_MODE(bRm))
2432 {
2433 /*
2434 * Register, register.
2435 */
2436 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2437 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2438 IEM_MC_LOCAL(uint32_t, fEFlags);
2439 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2440 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2441 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2442 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2443 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2444 IEM_MC_PREPARE_AVX_USAGE();
2445 IEM_MC_FETCH_EFLAGS(fEFlags);
2446 IEM_MC_REF_MXCSR(pfMxcsr);
2447 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2448 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2449 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2450 pfMxcsr, pEFlags, puSrc1, puSrc2);
2451 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2452 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2453 } IEM_MC_ELSE() {
2454 IEM_MC_COMMIT_EFLAGS(fEFlags);
2455 } IEM_MC_ENDIF();
2456
2457 IEM_MC_ADVANCE_RIP_AND_FINISH();
2458 IEM_MC_END();
2459 }
2460 else
2461 {
2462 /*
2463 * Register, memory.
2464 */
2465 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2466 IEM_MC_LOCAL(uint32_t, fEFlags);
2467 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2468 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2469 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2470 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2471 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2473
2474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2475 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2476 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2477 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2478
2479 IEM_MC_PREPARE_AVX_USAGE();
2480 IEM_MC_FETCH_EFLAGS(fEFlags);
2481 IEM_MC_REF_MXCSR(pfMxcsr);
2482 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2483 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2484 pfMxcsr, pEFlags, puSrc1, puSrc2);
2485 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2486 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2487 } IEM_MC_ELSE() {
2488 IEM_MC_COMMIT_EFLAGS(fEFlags);
2489 } IEM_MC_ENDIF();
2490
2491 IEM_MC_ADVANCE_RIP_AND_FINISH();
2492 IEM_MC_END();
2493 }
2494}
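/*
 * Note: vcomiss differs from vucomiss only in NaN signalling: the
 * unordered variant raises #IA solely for SNaN operands, whereas vcomiss
 * raises it for QNaNs as well. The flags mapping and the MC structure are
 * otherwise identical.
 */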
2495
2496
2497/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2498FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2499{
2500 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /*
2505 * Register, register.
2506 */
2507 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2508 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2509 IEM_MC_LOCAL(uint32_t, fEFlags);
2510 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2511 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2512 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2513 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2514 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2515 IEM_MC_PREPARE_AVX_USAGE();
2516 IEM_MC_FETCH_EFLAGS(fEFlags);
2517 IEM_MC_REF_MXCSR(pfMxcsr);
2518 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2519 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2520 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2521 pfMxcsr, pEFlags, puSrc1, puSrc2);
2522 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2523 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2524 } IEM_MC_ELSE() {
2525 IEM_MC_COMMIT_EFLAGS(fEFlags);
2526 } IEM_MC_ENDIF();
2527
2528 IEM_MC_ADVANCE_RIP_AND_FINISH();
2529 IEM_MC_END();
2530 }
2531 else
2532 {
2533 /*
2534 * Register, memory.
2535 */
2536 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2537 IEM_MC_LOCAL(uint32_t, fEFlags);
2538 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2539 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2540 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2541 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2542 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2544
2545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2546 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2548 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2549
2550 IEM_MC_PREPARE_AVX_USAGE();
2551 IEM_MC_FETCH_EFLAGS(fEFlags);
2552 IEM_MC_REF_MXCSR(pfMxcsr);
2553 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2554 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2555 pfMxcsr, pEFlags, puSrc1, puSrc2);
2556 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2557 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2558 } IEM_MC_ELSE() {
2559 IEM_MC_COMMIT_EFLAGS(fEFlags);
2560 } IEM_MC_ENDIF();
2561
2562 IEM_MC_ADVANCE_RIP_AND_FINISH();
2563 IEM_MC_END();
2564 }
2565}
2566
2567
2568/* Opcode VEX.F3.0F 0x2f - invalid */
2569/* Opcode VEX.F2.0F 0x2f - invalid */
2570
2571/* Opcode VEX.0F 0x30 - invalid */
2572/* Opcode VEX.0F 0x31 - invalid */
2573/* Opcode VEX.0F 0x32 - invalid */
2574/* Opcode VEX.0F 0x33 - invalid */
2575/* Opcode VEX.0F 0x34 - invalid */
2576/* Opcode VEX.0F 0x35 - invalid */
2577/* Opcode VEX.0F 0x36 - invalid */
2578/* Opcode VEX.0F 0x37 - invalid */
2579/* Opcode VEX.0F 0x38 - invalid */
2580/* Opcode VEX.0F 0x39 - invalid */
2581/* Opcode VEX.0F 0x3a - invalid */
2582/* Opcode VEX.0F 0x3b - invalid */
2583/* Opcode VEX.0F 0x3c - invalid */
2584/* Opcode VEX.0F 0x3d - invalid */
2585/* Opcode VEX.0F 0x3e - invalid */
2586/* Opcode VEX.0F 0x3f - invalid */
2587/* Opcode VEX.0F 0x40 - invalid */
2588/* Opcode VEX.0F 0x41 - invalid */
2589/* Opcode VEX.0F 0x42 - invalid */
2590/* Opcode VEX.0F 0x43 - invalid */
2591/* Opcode VEX.0F 0x44 - invalid */
2592/* Opcode VEX.0F 0x45 - invalid */
2593/* Opcode VEX.0F 0x46 - invalid */
2594/* Opcode VEX.0F 0x47 - invalid */
2595/* Opcode VEX.0F 0x48 - invalid */
2596/* Opcode VEX.0F 0x49 - invalid */
2597/* Opcode VEX.0F 0x4a - invalid */
2598/* Opcode VEX.0F 0x4b - invalid */
2599/* Opcode VEX.0F 0x4c - invalid */
2600/* Opcode VEX.0F 0x4d - invalid */
2601/* Opcode VEX.0F 0x4e - invalid */
2602/* Opcode VEX.0F 0x4f - invalid */
2603
2604
2605/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2606FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2607{
2608 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2610 if (IEM_IS_MODRM_REG_MODE(bRm))
2611 {
2612 /*
2613 * Register, register.
2614 */
2615 if (pVCpu->iem.s.uVexLength == 0)
2616 {
2617 IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2618 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2619 IEM_MC_LOCAL(uint8_t, u8Dst);
2620 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2621 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2623 IEM_MC_PREPARE_AVX_USAGE();
2624 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2625 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2626 pu8Dst, puSrc);
2627 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2628 IEM_MC_ADVANCE_RIP_AND_FINISH();
2629 IEM_MC_END();
2630 }
2631 else
2632 {
2633 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2634 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2635 IEM_MC_LOCAL(uint8_t, u8Dst);
2636 IEM_MC_LOCAL(RTUINT256U, uSrc);
2637 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2638 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2639
2640 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2641 IEM_MC_PREPARE_AVX_USAGE();
2642 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2643 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2644 pu8Dst, puSrc);
2645 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2646 IEM_MC_ADVANCE_RIP_AND_FINISH();
2647 IEM_MC_END();
2648 }
2649 }
2650 /* No memory operand. */
2651 else
2652 IEMOP_RAISE_INVALID_OPCODE_RET();
2653}
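/*
 * A minimal sketch of what the vmovmskps helpers are assumed to compute
 * (standard movmskps semantics, not lifted from the assembly):
 *      uint8_t u8Dst = 0;
 *      for (unsigned i = 0; i < cDWords; i++)  // 4 for XMM, 8 for YMM
 *          u8Dst |= ((puSrc->au32[i] >> 31) & 1) << i;
 * i.e. the sign bit of each packed single is gathered into the low bits
 * of the destination GPR, which IEM_MC_STORE_GREG_U32 then zero extends.
 */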
2654
2655
2656/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2657FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2658{
2659 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2661 if (IEM_IS_MODRM_REG_MODE(bRm))
2662 {
2663 /*
2664 * Register, register.
2665 */
2666 if (pVCpu->iem.s.uVexLength == 0)
2667 {
2668 IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2669 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2670 IEM_MC_LOCAL(uint8_t, u8Dst);
2671 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2672 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2673 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2674 IEM_MC_PREPARE_AVX_USAGE();
2675 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2676 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2677 pu8Dst, puSrc);
2678 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2679 IEM_MC_ADVANCE_RIP_AND_FINISH();
2680 IEM_MC_END();
2681 }
2682 else
2683 {
2684 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2685 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2686 IEM_MC_LOCAL(uint8_t, u8Dst);
2687 IEM_MC_LOCAL(RTUINT256U, uSrc);
2688 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2689 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2690
2691 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2692 IEM_MC_PREPARE_AVX_USAGE();
2693 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2694 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2695 pu8Dst, puSrc);
2696 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2697 IEM_MC_ADVANCE_RIP_AND_FINISH();
2698 IEM_MC_END();
2699 }
2700 }
2701 /* No memory operand. */
2702 else
2703 IEMOP_RAISE_INVALID_OPCODE_RET();
2704}
2705
2706
2707/* Opcode VEX.F3.0F 0x50 - invalid */
2708/* Opcode VEX.F2.0F 0x50 - invalid */
2709
2710/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2711FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2712/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2713FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2714/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2715FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2716/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2717FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2718
2719/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2720FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2721/* Opcode VEX.66.0F 0x52 - invalid */
2722/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2723FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2724/* Opcode VEX.F2.0F 0x52 - invalid */
2725
2726/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2727FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2728/* Opcode VEX.66.0F 0x53 - invalid */
2729/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2730FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2731/* Opcode VEX.F2.0F 0x53 - invalid */
2732
2733
2734/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2735FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2736{
2737 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2738 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2739 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2740}
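/*
 * Note: vandps/vandpd (and the vandn/vor/vxor pairs below) all dispatch
 * to the integer vpand/vpandn/vpor/vpxor workers: a bitwise operation is
 * type agnostic, so only the mnemonic and decoding differ between the
 * packed-float and packed-integer forms.
 */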
2741
2742
2743/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2744FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2745{
2746 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2747 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2748 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2749}
2750
2751
2752/* Opcode VEX.F3.0F 0x54 - invalid */
2753/* Opcode VEX.F2.0F 0x54 - invalid */
2754
2755
2756/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2757FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2758{
2759 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2760 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2761 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2762}
2763
2764
2765/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2766FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2767{
2768 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2769 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2770 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2771}
2772
2773
2774/* Opcode VEX.F3.0F 0x55 - invalid */
2775/* Opcode VEX.F2.0F 0x55 - invalid */
2776
2777/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2778FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2779{
2780 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2781 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2782 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2783}
2784
2785
2786/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2787FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2788{
2789 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2790 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2791 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2792}
2793
2794
2795/* Opcode VEX.F3.0F 0x56 - invalid */
2796/* Opcode VEX.F2.0F 0x56 - invalid */
2797
2798
2799/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2800FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2801{
2802 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2803 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2804 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2805}
2806
2807
2808/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2809FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2810{
2811 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2812 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2813 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2814}
2815
2816
2817/* Opcode VEX.F3.0F 0x57 - invalid */
2818/* Opcode VEX.F2.0F 0x57 - invalid */
2819
2820/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2821FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2822/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2823FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2824/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2825FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2826/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2827FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2828
2829/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2830FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2831/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2832FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2833/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2834FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2835/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2836FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2837
2838/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2839FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2840/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2841FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2842/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2843FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2844/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2845FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2846
2847/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2848FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2849/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2850FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2851/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2852FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2853/* Opcode VEX.F2.0F 0x5b - invalid */
2854
2855/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2856FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2857/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2858FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2859/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2860FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2861/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2862FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2863
2864/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2865FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2866/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2867FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2868/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2869FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2870/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2871FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2872
2873/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2874FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2875/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2876FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2877/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2878FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2879/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2880FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2881
2882/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2883FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2884/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2885FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2886/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2887FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2888/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2889FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2890
2891
2892/* Opcode VEX.0F 0x60 - invalid */
2893
2894
2895/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2896FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2897{
2898 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2899 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2900 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2901}
2902
2903
2904/* Opcode VEX.F3.0F 0x60 - invalid */
2905
2906
2907/* Opcode VEX.0F 0x61 - invalid */
2908
2909
2910/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2911FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2912{
2913 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2914 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2915 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2916}
2917
2918
2919/* Opcode VEX.F3.0F 0x61 - invalid */
2920
2921
2922/* Opcode VEX.0F 0x62 - invalid */
2923
2924/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2925FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2926{
2927 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2928 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2929 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2930}
2931
2932
2933/* Opcode VEX.F3.0F 0x62 - invalid */
2934
2935
2936
2937/* Opcode VEX.0F 0x63 - invalid */
2938
2939
2940/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2941FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2942{
2943 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2944 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2945 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2946}
2947
2948
2949/* Opcode VEX.F3.0F 0x63 - invalid */
2950
2951/* Opcode VEX.0F 0x64 - invalid */
2952
2953
2954/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2955FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2956{
2957 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2958 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2959 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2960}
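/*
 * Compare semantics sketch (standard signed pcmpgtb, stated for clarity
 * rather than taken from the helper):
 *      uDst.au8[i] = (int8_t)uSrc1.au8[i] > (int8_t)uSrc2.au8[i] ? 0xff : 0x00;
 * The word/dword variants below produce the same all-ones/all-zeroes mask
 * pattern on 16-/32-bit elements.
 */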
2961
2962
2963/* Opcode VEX.F3.0F 0x64 - invalid */
2964
2965/* Opcode VEX.0F 0x65 - invalid */
2966
2967
2968/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2969FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2970{
2971 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2972 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2973 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2974}
2975
2976
2977/* Opcode VEX.F3.0F 0x65 - invalid */
2978
2979/* Opcode VEX.0F 0x66 - invalid */
2980
2981
2982/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2983FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
2984{
2985 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2986 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
2987 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2988}
2989
2990
2991/* Opcode VEX.F3.0F 0x66 - invalid */
2992
2993/* Opcode VEX.0F 0x67 - invalid */
2994
2995
2996/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
2998{
2999 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3000 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3002}
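/*
 * Worked example of the vpackuswb saturation (standard packuswb
 * semantics, noted here for clarity):
 *      word 0x0100 (256)  -> byte 0xff   (clamped high)
 *      word 0xff80 (-128) -> byte 0x00   (clamped low)
 *      word 0x007f (127)  -> byte 0x7f   (in range)
 * Each signed 16-bit element becomes an unsigned, saturated 8-bit result;
 * the two sources are packed per 128-bit lane.
 */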
3003
3004
3005/* Opcode VEX.F3.0F 0x67 - invalid */
3006
3007
3008///**
3009// * Common worker for SSE2 instructions on the form:
3010// * pxxxx xmm1, xmm2/mem128
3011// *
3012// * The 2nd operand is the second half of a register, which in the memory case
3013// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3014// * where it may read the full 128 bits or only the upper 64 bits.
3015// *
3016// * Exceptions type 4.
3017// */
3018//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3019//{
3020// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3021// if (IEM_IS_MODRM_REG_MODE(bRm))
3022// {
3023// /*
3024// * Register, register.
3025// */
3026// IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3027// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3028// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3029// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3030// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3031// IEM_MC_PREPARE_SSE_USAGE();
3032// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3033// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3034// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3035// IEM_MC_ADVANCE_RIP_AND_FINISH();
3036// IEM_MC_END();
3037// }
3038// else
3039// {
3040// /*
3041// * Register, memory.
3042// */
3043// IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3044// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3045// IEM_MC_LOCAL(RTUINT128U, uSrc);
3046// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3047// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3048//
3049// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3050// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3051// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3052// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3053//
3054// IEM_MC_PREPARE_SSE_USAGE();
3055// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3056// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3057//
3058// IEM_MC_ADVANCE_RIP_AND_FINISH();
3059// IEM_MC_END();
3060// }
3061// return VINF_SUCCESS;
3062//}
3063
3064
3065/* Opcode VEX.0F 0x68 - invalid */
3066
3067/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3068FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3069{
3070 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3071 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3072 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3073}
3074
3075
3076/* Opcode VEX.F3.0F 0x68 - invalid */
3077
3078
3079/* Opcode VEX.0F 0x69 - invalid */
3080
3081
3082/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3083FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3084{
3085 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3086 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3087 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3088}
3089
3090
3091/* Opcode VEX.F3.0F 0x69 - invalid */
3092
3093
3094/* Opcode VEX.0F 0x6a - invalid */
3095
3096
3097/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3098FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3099{
3100 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3101 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3102 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3103}
3104
3105
3106/* Opcode VEX.F3.0F 0x6a - invalid */
3107
3108
3109/* Opcode VEX.0F 0x6b - invalid */
3110
3111
3112/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3113FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3114{
3115 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3116 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3117 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3118}
3119
3120
3121/* Opcode VEX.F3.0F 0x6b - invalid */
3122
3123
3124/* Opcode VEX.0F 0x6c - invalid */
3125
3126
3127/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3128FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3129{
3130 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3131 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3132 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3133}
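/*
 * For reference, the 128-bit vpunpcklqdq interleave (standard semantics):
 *      uDst.au64[0] = uSrc1.au64[0];
 *      uDst.au64[1] = uSrc2.au64[0];
 * The high variant at 0x6d takes au64[1] of each source instead, and the
 * VEX.256 forms repeat this independently per 128-bit lane.
 */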
3134
3135
3136/* Opcode VEX.F3.0F 0x6c - invalid */
3137/* Opcode VEX.F2.0F 0x6c - invalid */
3138
3139
3140/* Opcode VEX.0F 0x6d - invalid */
3141
3142
3143/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3144FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3145{
3146 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3147 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3148 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3149}
3150
3151
3152/* Opcode VEX.F3.0F 0x6d - invalid */
3153
3154
3155/* Opcode VEX.0F 0x6e - invalid */
3156
3157FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3158{
3159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3160 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3161 {
3162 /**
3163 * @opcode 0x6e
3164 * @opcodesub rex.w=1
3165 * @oppfx 0x66
3166 * @opcpuid avx
3167 * @opgroup og_avx_simdint_datamov
3168 * @opxcpttype 5
3169 * @optest 64-bit / op1=1 op2=2 -> op1=2
3170 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3171 */
3172 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3173 if (IEM_IS_MODRM_REG_MODE(bRm))
3174 {
3175 /* XMM, greg64 */
3176 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
3177 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3178 IEM_MC_LOCAL(uint64_t, u64Tmp);
3179
3180 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3181 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3182
3183 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3184 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3185
3186 IEM_MC_ADVANCE_RIP_AND_FINISH();
3187 IEM_MC_END();
3188 }
3189 else
3190 {
3191 /* XMM, [mem64] */
3192 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3194 IEM_MC_LOCAL(uint64_t, u64Tmp);
3195
3196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3197 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3198 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3199 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3200
3201 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3202 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3203
3204 IEM_MC_ADVANCE_RIP_AND_FINISH();
3205 IEM_MC_END();
3206 }
3207 }
3208 else
3209 {
3210 /**
3211 * @opdone
3212 * @opcode 0x6e
3213 * @opcodesub rex.w=0
3214 * @oppfx 0x66
3215 * @opcpuid avx
3216 * @opgroup og_avx_simdint_datamov
3217 * @opxcpttype 5
3218 * @opfunction iemOp_vmovd_q_Vy_Ey
3219 * @optest op1=1 op2=2 -> op1=2
3220 * @optest op1=0 op2=-42 -> op1=-42
3221 */
3222 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3223 if (IEM_IS_MODRM_REG_MODE(bRm))
3224 {
3225 /* XMM, greg32 */
3226 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
3227 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3228 IEM_MC_LOCAL(uint32_t, u32Tmp);
3229
3230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3232
3233 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3234 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3235
3236 IEM_MC_ADVANCE_RIP_AND_FINISH();
3237 IEM_MC_END();
3238 }
3239 else
3240 {
3241 /* XMM, [mem32] */
3242 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3244 IEM_MC_LOCAL(uint32_t, u32Tmp);
3245
3246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3247 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3248 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3249 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3250
3251 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3252 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3253
3254 IEM_MC_ADVANCE_RIP_AND_FINISH();
3255 IEM_MC_END();
3256 }
3257 }
3258}
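/*
 * Note: the IEM_OP_PRF_SIZE_REX_W test above is effectively checking
 * VEX.W, which the VEX prefix decoder maps onto the REX.W prefix bit.
 * W=1 selects vmovq (64-bit GPR/memory source), W=0 selects vmovd
 * (32-bit source); both zero the destination register up to VLMAX.
 */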
3259
3260
3261/* Opcode VEX.F3.0F 0x6e - invalid */
3262
3263
3264/* Opcode VEX.0F 0x6f - invalid */
3265
3266/**
3267 * @opcode 0x6f
3268 * @oppfx 0x66
3269 * @opcpuid avx
3270 * @opgroup og_avx_simdint_datamove
3271 * @opxcpttype 1
3272 * @optest op1=1 op2=2 -> op1=2
3273 * @optest op1=0 op2=-42 -> op1=-42
3274 */
3275FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3276{
3277 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3278 Assert(pVCpu->iem.s.uVexLength <= 1);
3279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3280 if (IEM_IS_MODRM_REG_MODE(bRm))
3281 {
3282 /*
3283 * Register, register.
3284 */
3285 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3286 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3287
3288 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3289 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3290 if (pVCpu->iem.s.uVexLength == 0)
3291 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3292 IEM_GET_MODRM_RM(pVCpu, bRm));
3293 else
3294 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3295 IEM_GET_MODRM_RM(pVCpu, bRm));
3296 IEM_MC_ADVANCE_RIP_AND_FINISH();
3297 IEM_MC_END();
3298 }
3299 else if (pVCpu->iem.s.uVexLength == 0)
3300 {
3301 /*
3302 * Register, memory128.
3303 */
3304 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3305 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3307
3308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3309 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3310 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3312
3313 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3314 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3315
3316 IEM_MC_ADVANCE_RIP_AND_FINISH();
3317 IEM_MC_END();
3318 }
3319 else
3320 {
3321 /*
3322 * Register, memory256.
3323 */
3324 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3325 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3327
3328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3332
3333 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3334 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3335
3336 IEM_MC_ADVANCE_RIP_AND_FINISH();
3337 IEM_MC_END();
3338 }
3339}
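/*
 * Note: vmovdqa requires a naturally aligned operand, hence the
 * _ALIGN_SSE (16 byte) and _ALIGN_AVX (32 byte) fetch helpers above,
 * which are expected to raise #GP(0) on misalignment; the vmovdqu
 * variant below uses the unaligned fetchers instead.
 */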
3340
3341/**
3342 * @opcode 0x6f
3343 * @oppfx 0xf3
3344 * @opcpuid avx
3345 * @opgroup og_avx_simdint_datamove
3346 * @opxcpttype 4UA
3347 * @optest op1=1 op2=2 -> op1=2
3348 * @optest op1=0 op2=-42 -> op1=-42
3349 */
3350FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3351{
3352 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3353 Assert(pVCpu->iem.s.uVexLength <= 1);
3354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3355 if (IEM_IS_MODRM_REG_MODE(bRm))
3356 {
3357 /*
3358 * Register, register.
3359 */
3360 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3361 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3362
3363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3365 if (pVCpu->iem.s.uVexLength == 0)
3366 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3367 IEM_GET_MODRM_RM(pVCpu, bRm));
3368 else
3369 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3370 IEM_GET_MODRM_RM(pVCpu, bRm));
3371 IEM_MC_ADVANCE_RIP_AND_FINISH();
3372 IEM_MC_END();
3373 }
3374 else if (pVCpu->iem.s.uVexLength == 0)
3375 {
3376 /*
3377 * Register, memory128.
3378 */
3379 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3380 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3382
3383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3384 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3385 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3386 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3387
3388 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3389 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3390
3391 IEM_MC_ADVANCE_RIP_AND_FINISH();
3392 IEM_MC_END();
3393 }
3394 else
3395 {
3396 /*
3397 * Register, memory256.
3398 */
3399 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3400 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3402
3403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3404 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3405 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3406 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3407
3408 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3409 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3410
3411 IEM_MC_ADVANCE_RIP_AND_FINISH();
3412 IEM_MC_END();
3413 }
3414}
3415
3416
3417/* Opcode VEX.0F 0x70 - invalid */
3418
3419
3420/**
3421 * Common worker for AVX/AVX2 instructions on the forms:
3422 * - vpxxx xmm0, xmm2/mem128, imm8
3423 * - vpxxx ymm0, ymm2/mem256, imm8
3424 *
3425 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3426 */
3427FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3428{
3429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3430 if (IEM_IS_MODRM_REG_MODE(bRm))
3431 {
3432 /*
3433 * Register, register.
3434 */
3435 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3436 if (pVCpu->iem.s.uVexLength)
3437 {
3438 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3439 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3440 IEM_MC_LOCAL(RTUINT256U, uDst);
3441 IEM_MC_LOCAL(RTUINT256U, uSrc);
3442 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3443 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3444 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3445 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3446 IEM_MC_PREPARE_AVX_USAGE();
3447 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3448 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3449 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3450 IEM_MC_ADVANCE_RIP_AND_FINISH();
3451 IEM_MC_END();
3452 }
3453 else
3454 {
3455 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3456 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3457 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3458 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3459 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3460 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3461 IEM_MC_PREPARE_AVX_USAGE();
3462 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3463 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3464 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3465 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3466 IEM_MC_ADVANCE_RIP_AND_FINISH();
3467 IEM_MC_END();
3468 }
3469 }
3470 else
3471 {
3472 /*
3473 * Register, memory.
3474 */
3475 if (pVCpu->iem.s.uVexLength)
3476 {
3477 IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
3478 IEM_MC_LOCAL(RTUINT256U, uDst);
3479 IEM_MC_LOCAL(RTUINT256U, uSrc);
3480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3481 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3482 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3483
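 /* Note: the last parameter is the number of immediate bytes still to be fetched (the imm8 below), so RIP-relative addressing is calculated off the true end of the instruction. */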
3484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3485 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3486 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3487 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3488 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3489 IEM_MC_PREPARE_AVX_USAGE();
3490
3491 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3492 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3493 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3494
3495 IEM_MC_ADVANCE_RIP_AND_FINISH();
3496 IEM_MC_END();
3497 }
3498 else
3499 {
 3500 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3501 IEM_MC_LOCAL(RTUINT128U, uSrc);
3502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3503 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3504 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3505
3506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3507 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3508 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3509 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3510 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3511 IEM_MC_PREPARE_AVX_USAGE();
3512
3513 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3514 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3515 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3516 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3517
3518 IEM_MC_ADVANCE_RIP_AND_FINISH();
3519 IEM_MC_END();
3520 }
3521 }
3522}
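
/* The three vpshufXX encodings of 0x70 below all share this worker: the destination is ModRM.reg, the source ModRM.rm, and the imm8 selects the element order. */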
3523
3524
3525/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3526FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3527{
3528 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3529 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3530 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3531
3532}
3533
3534
3535/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3536FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3537{
3538 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3539 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3540 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3541
3542}
3543
3544
3545/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3546FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3547{
3548 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3549 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3550 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3551}
3552
3553
3554/**
3555 * Common worker(s) for AVX/AVX2 instructions on the forms:
3556 * - vpxxx xmm0, xmm2, imm8
3557 * - vpxxx ymm0, ymm2, imm8
3558 *
3559 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3560 */
3561FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3562{
3563 if (IEM_IS_MODRM_REG_MODE(bRm))
3564 {
3565 /*
3566 * Register, register.
3567 */
3568 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3569 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3570 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3571 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3572 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3573 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3574 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3575 IEM_MC_PREPARE_AVX_USAGE();
3576 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3577 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3578 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3579 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3580 IEM_MC_ADVANCE_RIP_AND_FINISH();
3581 IEM_MC_END();
3582 }
3583 /* No memory operand. */
3584 else
3585 IEMOP_RAISE_INVALID_OPCODE_RET();
3586}
3587
3588FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3589{
3590 if (IEM_IS_MODRM_REG_MODE(bRm))
3591 {
3592 /*
3593 * Register, register.
3594 */
3595 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3596 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3597 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3598 IEM_MC_LOCAL(RTUINT256U, uDst);
3599 IEM_MC_LOCAL(RTUINT256U, uSrc);
3600 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3601 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3602 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3603 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3604 IEM_MC_PREPARE_AVX_USAGE();
3605 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3606 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3607 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3608 IEM_MC_ADVANCE_RIP_AND_FINISH();
3609 IEM_MC_END();
3610 }
3611 /* No memory operand. */
3612 else
3613 IEMOP_RAISE_INVALID_OPCODE_RET();
3614}
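
/* These shift-by-immediate forms (VEX groups 12/13/14 below) encode the destination in VEX.vvvv (Hx), the source register in ModRM.rm (Ux) and the shift count in the imm8, e.g. vpsllw xmm1, xmm2, 7. Only register encodings are valid; the memory forms resolve to invalid opcodes via the group tables. */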
3615
3616
3617/* Opcode VEX.0F 0x71 11/2 - invalid. */
3618/** Opcode VEX.66.0F 0x71 11/2. */
3619FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3620
3621/* Opcode VEX.0F 0x71 11/4 - invalid */
3622/** Opcode VEX.66.0F 0x71 11/4. */
3623FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3624
3625/* Opcode VEX.0F 0x71 11/6 - invalid */
3626
3627/** Opcode VEX.66.0F 0x71 11/6. */
3628FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3629{
3630 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3631 if (pVCpu->iem.s.uVexLength)
3632 {
3633 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3634 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3635 }
3636 else
3637 {
3638 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3639 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3640 }
3641}
3642
3643
3644/**
3645 * VEX Group 12 jump table for register variant.
3646 */
3647IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
 3648{ /* pfx: none, 066h, 0f3h, 0f2h */
3649 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3650 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3651 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3652 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3653 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3654 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3655 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3656 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3657};
3658AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
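/* The dispatcher below indexes this as ModRM.reg * 4 + SIMD prefix (none/066h/0f3h/0f2h), i.e. the 8 rows by 4 columns asserted above. */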
3659
3660
3661/** Opcode VEX.0F 0x71. */
3662FNIEMOP_DEF(iemOp_VGrp12)
3663{
3664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3665 if (IEM_IS_MODRM_REG_MODE(bRm))
3666 /* register, register */
3667 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3668 + pVCpu->iem.s.idxPrefix], bRm);
3669 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3670}
3671
3672
3673/* Opcode VEX.0F 0x72 11/2 - invalid. */
3674/** Opcode VEX.66.0F 0x72 11/2. */
3675FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3676
3677/* Opcode VEX.0F 0x72 11/4 - invalid. */
3678/** Opcode VEX.66.0F 0x72 11/4. */
3679FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3680
3681/* Opcode VEX.0F 0x72 11/6 - invalid. */
3682
3683/** Opcode VEX.66.0F 0x72 11/6. */
3684FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3685{
3687 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3688 if (pVCpu->iem.s.uVexLength)
3689 {
3690 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3691 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3692 }
3693 else
3694 {
3695 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3696 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3697 }
3698}
3699
3700
3701/**
3702 * Group 13 jump table for register variant.
3703 */
3704IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
 3705{ /* pfx: none, 066h, 0f3h, 0f2h */
3706 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3707 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3708 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3709 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3710 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3711 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3712 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3713 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3714};
3715AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3716
3717/** Opcode VEX.0F 0x72. */
3718FNIEMOP_DEF(iemOp_VGrp13)
3719{
3720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3721 if (IEM_IS_MODRM_REG_MODE(bRm))
3722 /* register, register */
3723 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3724 + pVCpu->iem.s.idxPrefix], bRm);
3725 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3726}
3727
3728
3729/* Opcode VEX.0F 0x73 11/2 - invalid. */
3730/** Opcode VEX.66.0F 0x73 11/2. */
3731FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3732
3733/** Opcode VEX.66.0F 0x73 11/3. */
3734FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3735
3736/* Opcode VEX.0F 0x73 11/6 - invalid. */
3737
3738/** Opcode VEX.66.0F 0x73 11/6. */
3739FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3740{
3741 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3742 if (pVCpu->iem.s.uVexLength)
3743 {
3744 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3745 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3746 }
3747 else
3748 {
3749 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3750 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3751 }
3752}
3753
3754/** Opcode VEX.66.0F 0x73 11/7. */
3755FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3756
3757/**
3758 * Group 14 jump table for register variant.
3759 */
3760IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
 3761{ /* pfx: none, 066h, 0f3h, 0f2h */
3762 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3763 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3764 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3765 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3766 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3767 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3768 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3769 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3770};
3771AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3772
3773
3774/** Opcode VEX.0F 0x73. */
3775FNIEMOP_DEF(iemOp_VGrp14)
3776{
3777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3778 if (IEM_IS_MODRM_REG_MODE(bRm))
3779 /* register, register */
3780 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3781 + pVCpu->iem.s.idxPrefix], bRm);
3782 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3783}
3784
3785
3786/* Opcode VEX.0F 0x74 - invalid */
3787
3788
3789/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3790FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3791{
3792 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3793 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3794 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3795}
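
/* Note: IEMOPMEDIAF3_INIT_VARS(vpcmpeqb) declares the local s_Host and s_Fallback function tables (native vs C emulation) that IEM_SELECT_HOST_OR_FALLBACK picks from, here and in the similar instructions below. */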
3796
3797/* Opcode VEX.F3.0F 0x74 - invalid */
3798/* Opcode VEX.F2.0F 0x74 - invalid */
3799
3800
3801/* Opcode VEX.0F 0x75 - invalid */
3802
3803
3804/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3805FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3806{
3807 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3808 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3809 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3810}
3811
3812
3813/* Opcode VEX.F3.0F 0x75 - invalid */
3814/* Opcode VEX.F2.0F 0x75 - invalid */
3815
3816
3817/* Opcode VEX.0F 0x76 - invalid */
3818
3819
3820/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3821FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3822{
3823 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3824 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3825 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3826}
3827
3828
3829/* Opcode VEX.F3.0F 0x76 - invalid */
3830/* Opcode VEX.F2.0F 0x76 - invalid */
3831
3832
3833/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3834FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3835{
3836 Assert(pVCpu->iem.s.uVexLength <= 1);
3837 if (pVCpu->iem.s.uVexLength == 0)
3838 {
3839 /*
3840 * 128-bit: vzeroupper
3841 */
3842 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3843 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3844
3845 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3846 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3847 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3848
3849 IEM_MC_CLEAR_YREG_128_UP(0);
3850 IEM_MC_CLEAR_YREG_128_UP(1);
3851 IEM_MC_CLEAR_YREG_128_UP(2);
3852 IEM_MC_CLEAR_YREG_128_UP(3);
3853 IEM_MC_CLEAR_YREG_128_UP(4);
3854 IEM_MC_CLEAR_YREG_128_UP(5);
3855 IEM_MC_CLEAR_YREG_128_UP(6);
3856 IEM_MC_CLEAR_YREG_128_UP(7);
3857
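 /* XMM8 thru XMM15 are only addressable in 64-bit mode. */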
 3858 if (IEM_IS_64BIT_CODE(pVCpu))
3859 {
3860 IEM_MC_CLEAR_YREG_128_UP( 8);
3861 IEM_MC_CLEAR_YREG_128_UP( 9);
3862 IEM_MC_CLEAR_YREG_128_UP(10);
3863 IEM_MC_CLEAR_YREG_128_UP(11);
3864 IEM_MC_CLEAR_YREG_128_UP(12);
3865 IEM_MC_CLEAR_YREG_128_UP(13);
3866 IEM_MC_CLEAR_YREG_128_UP(14);
3867 IEM_MC_CLEAR_YREG_128_UP(15);
3868 }
3869
3870 IEM_MC_ADVANCE_RIP_AND_FINISH();
3871 IEM_MC_END();
3872 }
3873 else
3874 {
3875 /*
3876 * 256-bit: vzeroall
3877 */
3878 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3879 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
3880
3881 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3882 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3883 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3884
3885 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
3886 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3887 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3888 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3889 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3890 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
3891 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
3892 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
3893 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
3894
 3895 if (IEM_IS_64BIT_CODE(pVCpu)) /* XMM8 thru XMM15 only exist in 64-bit mode. */
3896 {
3897 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
3898 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
3899 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
3900 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
3901 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
3902 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
3903 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
3904 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
3905 }
3906
3907 IEM_MC_ADVANCE_RIP_AND_FINISH();
3908 IEM_MC_END();
3909 }
3910}
3911
3912
3913/* Opcode VEX.66.0F 0x77 - invalid */
3914/* Opcode VEX.F3.0F 0x77 - invalid */
3915/* Opcode VEX.F2.0F 0x77 - invalid */
3916
3917/* Opcode VEX.0F 0x78 - invalid */
3918/* Opcode VEX.66.0F 0x78 - invalid */
3919/* Opcode VEX.F3.0F 0x78 - invalid */
3920/* Opcode VEX.F2.0F 0x78 - invalid */
3921
3922/* Opcode VEX.0F 0x79 - invalid */
3923/* Opcode VEX.66.0F 0x79 - invalid */
3924/* Opcode VEX.F3.0F 0x79 - invalid */
3925/* Opcode VEX.F2.0F 0x79 - invalid */
3926
3927/* Opcode VEX.0F 0x7a - invalid */
3928/* Opcode VEX.66.0F 0x7a - invalid */
3929/* Opcode VEX.F3.0F 0x7a - invalid */
3930/* Opcode VEX.F2.0F 0x7a - invalid */
3931
3932/* Opcode VEX.0F 0x7b - invalid */
3933/* Opcode VEX.66.0F 0x7b - invalid */
3934/* Opcode VEX.F3.0F 0x7b - invalid */
3935/* Opcode VEX.F2.0F 0x7b - invalid */
3936
3937/* Opcode VEX.0F 0x7c - invalid */
3938/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3939FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3940/* Opcode VEX.F3.0F 0x7c - invalid */
3941/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3942FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3943
3944/* Opcode VEX.0F 0x7d - invalid */
3945/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3946FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3947/* Opcode VEX.F3.0F 0x7d - invalid */
3948/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3949FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3950
3951
3952/* Opcode VEX.0F 0x7e - invalid */
3953
3954FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3955{
3956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3957 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3958 {
3959 /**
3960 * @opcode 0x7e
3961 * @opcodesub rex.w=1
3962 * @oppfx 0x66
3963 * @opcpuid avx
3964 * @opgroup og_avx_simdint_datamov
3965 * @opxcpttype 5
3966 * @optest 64-bit / op1=1 op2=2 -> op1=2
3967 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3968 */
3969 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3970 if (IEM_IS_MODRM_REG_MODE(bRm))
3971 {
3972 /* greg64, XMM */
3973 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3974 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3975 IEM_MC_LOCAL(uint64_t, u64Tmp);
3976
3977 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3978 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3979
3980 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3981 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3982
3983 IEM_MC_ADVANCE_RIP_AND_FINISH();
3984 IEM_MC_END();
3985 }
3986 else
3987 {
3988 /* [mem64], XMM */
3989 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
3990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3991 IEM_MC_LOCAL(uint64_t, u64Tmp);
3992
3993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3994 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3995 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3996 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3997
3998 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3999 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4000
4001 IEM_MC_ADVANCE_RIP_AND_FINISH();
4002 IEM_MC_END();
4003 }
4004 }
4005 else
4006 {
4007 /**
4008 * @opdone
4009 * @opcode 0x7e
4010 * @opcodesub rex.w=0
4011 * @oppfx 0x66
4012 * @opcpuid avx
4013 * @opgroup og_avx_simdint_datamov
4014 * @opxcpttype 5
 4015 * @opfunction iemOp_vmovd_q_Ey_Vy
4016 * @optest op1=1 op2=2 -> op1=2
4017 * @optest op1=0 op2=-42 -> op1=-42
4018 */
4019 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4020 if (IEM_IS_MODRM_REG_MODE(bRm))
4021 {
4022 /* greg32, XMM */
4023 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4024 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4025 IEM_MC_LOCAL(uint32_t, u32Tmp);
4026
4027 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4028 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4029
4030 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4031 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4032
4033 IEM_MC_ADVANCE_RIP_AND_FINISH();
4034 IEM_MC_END();
4035 }
4036 else
4037 {
4038 /* [mem32], XMM */
4039 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4041 IEM_MC_LOCAL(uint32_t, u32Tmp);
4042
4043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4044 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4045 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4046 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4047
4048 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4049 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4050
4051 IEM_MC_ADVANCE_RIP_AND_FINISH();
4052 IEM_MC_END();
4053 }
4054 }
4055}
4056
4057
4058/**
4059 * @opcode 0x7e
4060 * @oppfx 0xf3
4061 * @opcpuid avx
4062 * @opgroup og_avx_pcksclr_datamove
4063 * @opxcpttype none
4064 * @optest op1=1 op2=2 -> op1=2
4065 * @optest op1=0 op2=-42 -> op1=-42
4066 */
4067FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4068{
4069 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4071 if (IEM_IS_MODRM_REG_MODE(bRm))
4072 {
4073 /*
4074 * Register, register.
4075 */
4076 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4077 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4078
4079 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4080 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4081
4082 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4083 IEM_GET_MODRM_RM(pVCpu, bRm));
4084 IEM_MC_ADVANCE_RIP_AND_FINISH();
4085 IEM_MC_END();
4086 }
4087 else
4088 {
4089 /*
 4090 * Register, memory.
4091 */
4092 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4093 IEM_MC_LOCAL(uint64_t, uSrc);
4094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4095
4096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4097 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4100
4101 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4102 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4103
4104 IEM_MC_ADVANCE_RIP_AND_FINISH();
4105 IEM_MC_END();
4106 }
4108}


4109/* Opcode VEX.F2.0F 0x7e - invalid */
4110
4111
4112/* Opcode VEX.0F 0x7f - invalid */
4113
4114/**
4115 * @opcode 0x7f
4116 * @oppfx 0x66
4117 * @opcpuid avx
4118 * @opgroup og_avx_simdint_datamove
4119 * @opxcpttype 1
4120 * @optest op1=1 op2=2 -> op1=2
4121 * @optest op1=0 op2=-42 -> op1=-42
4122 */
4123FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4124{
4125 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4126 Assert(pVCpu->iem.s.uVexLength <= 1);
4127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4128 if (IEM_IS_MODRM_REG_MODE(bRm))
4129 {
4130 /*
4131 * Register, register.
4132 */
4133 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4134 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4135
4136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4137 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4138 if (pVCpu->iem.s.uVexLength == 0)
4139 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4140 IEM_GET_MODRM_REG(pVCpu, bRm));
4141 else
4142 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4143 IEM_GET_MODRM_REG(pVCpu, bRm));
4144 IEM_MC_ADVANCE_RIP_AND_FINISH();
4145 IEM_MC_END();
4146 }
4147 else if (pVCpu->iem.s.uVexLength == 0)
4148 {
4149 /*
4150 * Register, memory128.
4151 */
4152 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4153 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4155
4156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4160
4161 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4162 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4163
4164 IEM_MC_ADVANCE_RIP_AND_FINISH();
4165 IEM_MC_END();
4166 }
4167 else
4168 {
4169 /*
4170 * Register, memory256.
4171 */
4172 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4173 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4175
4176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4177 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4178 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4179 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4180
4181 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4182 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4183
4184 IEM_MC_ADVANCE_RIP_AND_FINISH();
4185 IEM_MC_END();
4186 }
4187}
4188
4189
4190/**
4191 * @opcode 0x7f
4192 * @oppfx 0xf3
4193 * @opcpuid avx
4194 * @opgroup og_avx_simdint_datamove
4195 * @opxcpttype 4UA
4196 * @optest op1=1 op2=2 -> op1=2
4197 * @optest op1=0 op2=-42 -> op1=-42
4198 */
4199FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4200{
4201 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4202 Assert(pVCpu->iem.s.uVexLength <= 1);
4203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4204 if (IEM_IS_MODRM_REG_MODE(bRm))
4205 {
4206 /*
4207 * Register, register.
4208 */
4209 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4210 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4211
4212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4214 if (pVCpu->iem.s.uVexLength == 0)
4215 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4216 IEM_GET_MODRM_REG(pVCpu, bRm));
4217 else
4218 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4219 IEM_GET_MODRM_REG(pVCpu, bRm));
4220 IEM_MC_ADVANCE_RIP_AND_FINISH();
4221 IEM_MC_END();
4222 }
4223 else if (pVCpu->iem.s.uVexLength == 0)
4224 {
4225 /*
4226 * Register, memory128.
4227 */
4228 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4229 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4231
4232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4233 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4234 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4235 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4236
4237 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4238 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4239
4240 IEM_MC_ADVANCE_RIP_AND_FINISH();
4241 IEM_MC_END();
4242 }
4243 else
4244 {
4245 /*
4246 * Register, memory256.
4247 */
4248 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4249 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4251
4252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4256
4257 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4258 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4259
4260 IEM_MC_ADVANCE_RIP_AND_FINISH();
4261 IEM_MC_END();
4262 }
4263}
4264
4265/* Opcode VEX.F2.0F 0x7f - invalid */
4266
4267
4268/* Opcode VEX.0F 0x80 - invalid */
4269/* Opcode VEX.0F 0x81 - invalid */
4270/* Opcode VEX.0F 0x82 - invalid */
4271/* Opcode VEX.0F 0x83 - invalid */
4272/* Opcode VEX.0F 0x84 - invalid */
4273/* Opcode VEX.0F 0x85 - invalid */
4274/* Opcode VEX.0F 0x86 - invalid */
4275/* Opcode VEX.0F 0x87 - invalid */
4276/* Opcode VEX.0F 0x88 - invalid */
4277/* Opcode VEX.0F 0x89 - invalid */
4278/* Opcode VEX.0F 0x8a - invalid */
4279/* Opcode VEX.0F 0x8b - invalid */
4280/* Opcode VEX.0F 0x8c - invalid */
4281/* Opcode VEX.0F 0x8d - invalid */
4282/* Opcode VEX.0F 0x8e - invalid */
4283/* Opcode VEX.0F 0x8f - invalid */
4284/* Opcode VEX.0F 0x90 - invalid */
4285/* Opcode VEX.0F 0x91 - invalid */
4286/* Opcode VEX.0F 0x92 - invalid */
4287/* Opcode VEX.0F 0x93 - invalid */
4288/* Opcode VEX.0F 0x94 - invalid */
4289/* Opcode VEX.0F 0x95 - invalid */
4290/* Opcode VEX.0F 0x96 - invalid */
4291/* Opcode VEX.0F 0x97 - invalid */
4292/* Opcode VEX.0F 0x98 - invalid */
4293/* Opcode VEX.0F 0x99 - invalid */
4294/* Opcode VEX.0F 0x9a - invalid */
4295/* Opcode VEX.0F 0x9b - invalid */
4296/* Opcode VEX.0F 0x9c - invalid */
4297/* Opcode VEX.0F 0x9d - invalid */
4298/* Opcode VEX.0F 0x9e - invalid */
4299/* Opcode VEX.0F 0x9f - invalid */
4300/* Opcode VEX.0F 0xa0 - invalid */
4301/* Opcode VEX.0F 0xa1 - invalid */
4302/* Opcode VEX.0F 0xa2 - invalid */
4303/* Opcode VEX.0F 0xa3 - invalid */
4304/* Opcode VEX.0F 0xa4 - invalid */
4305/* Opcode VEX.0F 0xa5 - invalid */
4306/* Opcode VEX.0F 0xa6 - invalid */
4307/* Opcode VEX.0F 0xa7 - invalid */
4308/* Opcode VEX.0F 0xa8 - invalid */
4309/* Opcode VEX.0F 0xa9 - invalid */
4310/* Opcode VEX.0F 0xaa - invalid */
4311/* Opcode VEX.0F 0xab - invalid */
4312/* Opcode VEX.0F 0xac - invalid */
4313/* Opcode VEX.0F 0xad - invalid */
4314
4315
4316/* Opcode VEX.0F 0xae mem/0 - invalid. */
4317/* Opcode VEX.0F 0xae mem/1 - invalid. */
4318
4319/**
4320 * @ opmaps grp15
4321 * @ opcode !11/2
4322 * @ oppfx none
4323 * @ opcpuid sse
4324 * @ opgroup og_sse_mxcsrsm
4325 * @ opxcpttype 5
4326 * @ optest op1=0 -> mxcsr=0
4327 * @ optest op1=0x2083 -> mxcsr=0x2083
4328 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4329 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4330 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4331 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4332 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4333 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4334 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4335 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4336 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4337 */
4338FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4339//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4340//{
4341// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4342// IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4343// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4344// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4345// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4346// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4347// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4348// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4349// IEM_MC_END();
4350// return VINF_SUCCESS;
4351//}
4352
4353
4354/**
4355 * @opmaps vexgrp15
4356 * @opcode !11/3
4357 * @oppfx none
4358 * @opcpuid avx
4359 * @opgroup og_avx_mxcsrsm
4360 * @opxcpttype 5
4361 * @optest mxcsr=0 -> op1=0
4362 * @optest mxcsr=0x2083 -> op1=0x2083
4363 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4364 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4365 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4366 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4367 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4368 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4369 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4370 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4371 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4372 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4373 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4374 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4375 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4376 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4377 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4378 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4379 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4380 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4381 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4382 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4383 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4384 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4385 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4386 * -> value.xcpt=0x6
 4387 * @remarks AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set. It also
4388 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4389 * APMv4 rev 3.17 page 509.
4390 * @todo Test this instruction on AMD Ryzen.
4391 */
4392FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4393{
4394 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4395 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4396 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4398 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4399 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4400 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4401 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4402 IEM_MC_END();
4403}
4404
4405/* Opcode VEX.0F 0xae mem/4 - invalid. */
4406/* Opcode VEX.0F 0xae mem/5 - invalid. */
4407/* Opcode VEX.0F 0xae mem/6 - invalid. */
4408/* Opcode VEX.0F 0xae mem/7 - invalid. */
4409
4410/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4411/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4412/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4413/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4414/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4415/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4416/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4417/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4418
4419/**
4420 * Vex group 15 jump table for memory variant.
4421 */
4422IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4423{ /* pfx: none, 066h, 0f3h, 0f2h */
4424 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4425 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4426 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4427 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4428 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4429 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4430 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4431 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4432};
4433AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4434
4435
4436/** Opcode VEX.0F 0xae. */
4437FNIEMOP_DEF(iemOp_VGrp15)
4438{
4439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4440 if (IEM_IS_MODRM_REG_MODE(bRm))
4441 /* register, register */
4442 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4443
4444 /* memory, register */
4445 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4446 + pVCpu->iem.s.idxPrefix], bRm);
4447}
4448
4449
4450/* Opcode VEX.0F 0xaf - invalid. */
4451
4452/* Opcode VEX.0F 0xb0 - invalid. */
4453/* Opcode VEX.0F 0xb1 - invalid. */
4454/* Opcode VEX.0F 0xb2 - invalid. */
4456/* Opcode VEX.0F 0xb3 - invalid. */
4457/* Opcode VEX.0F 0xb4 - invalid. */
4458/* Opcode VEX.0F 0xb5 - invalid. */
4459/* Opcode VEX.0F 0xb6 - invalid. */
4460/* Opcode VEX.0F 0xb7 - invalid. */
4461/* Opcode VEX.0F 0xb8 - invalid. */
4462/* Opcode VEX.0F 0xb9 - invalid. */
4463/* Opcode VEX.0F 0xba - invalid. */
4464/* Opcode VEX.0F 0xbb - invalid. */
4465/* Opcode VEX.0F 0xbc - invalid. */
4466/* Opcode VEX.0F 0xbd - invalid. */
4467/* Opcode VEX.0F 0xbe - invalid. */
4468/* Opcode VEX.0F 0xbf - invalid. */
4469
4470/* Opcode VEX.0F 0xc0 - invalid. */
4471/* Opcode VEX.66.0F 0xc0 - invalid. */
4472/* Opcode VEX.F3.0F 0xc0 - invalid. */
4473/* Opcode VEX.F2.0F 0xc0 - invalid. */
4474
4475/* Opcode VEX.0F 0xc1 - invalid. */
4476/* Opcode VEX.66.0F 0xc1 - invalid. */
4477/* Opcode VEX.F3.0F 0xc1 - invalid. */
4478/* Opcode VEX.F2.0F 0xc1 - invalid. */
4479
4480/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4481FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4482/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4483FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4484/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4485FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4486/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4487FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4488
4489/* Opcode VEX.0F 0xc3 - invalid */
4490/* Opcode VEX.66.0F 0xc3 - invalid */
4491/* Opcode VEX.F3.0F 0xc3 - invalid */
4492/* Opcode VEX.F2.0F 0xc3 - invalid */
4493
4494/* Opcode VEX.0F 0xc4 - invalid */
4495
4496
4497/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4498FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4499{
4500 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
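 /* VPINSRW xmm1, xmm2, r32/m16, imm8: destination in ModRM.reg, pass-through source in VEX.vvvv (Hdq), word operand in ModRM.rm. */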
4501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4502 if (IEM_IS_MODRM_REG_MODE(bRm))
4503 {
4504 /*
4505 * Register, register.
4506 */
4507 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4508 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4509 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4510 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4511 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4512 IEM_MC_ARG(uint16_t, u16Src, 2);
4513 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4514 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4515 IEM_MC_PREPARE_AVX_USAGE();
4516 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
 4517 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu)); /* Hdq comes from VEX.vvvv, not ModRM.rm. */
4518 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4519 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4520 puDst, puSrc, u16Src, bImmArg);
4521 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4522 IEM_MC_ADVANCE_RIP_AND_FINISH();
4523 IEM_MC_END();
4524 }
4525 else
4526 {
4527 /*
4528 * Register, memory.
4529 */
4530 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4532 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4533 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4534 IEM_MC_ARG(uint16_t, u16Src, 2);
4535
 4536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* the imm8 below still needs fetching */
4537 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4538 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4539 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4540 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4541 IEM_MC_PREPARE_AVX_USAGE();
4542
4543 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4544 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
 4545 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu)); /* Hdq comes from VEX.vvvv. */
4546 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4547 puDst, puSrc, u16Src, bImmArg);
4548 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4549
4550 IEM_MC_ADVANCE_RIP_AND_FINISH();
4551 IEM_MC_END();
4552 }
4553}
4554
4555
4556/* Opcode VEX.F3.0F 0xc4 - invalid */
4557/* Opcode VEX.F2.0F 0xc4 - invalid */
4558
4559/* Opcode VEX.0F 0xc5 - invalid */
4560
4561
4562/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4563FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4564{
4565 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4567 if (IEM_IS_MODRM_REG_MODE(bRm))
4568 {
4569 /*
4570 * Register, register.
4571 */
4572 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4573 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4574 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4575 IEM_MC_LOCAL(uint16_t, u16Dst);
4576 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4577 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4578 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4579 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4580 IEM_MC_PREPARE_AVX_USAGE();
4581 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4582 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4583 pu16Dst, puSrc, bImmArg);
4584 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4585 IEM_MC_ADVANCE_RIP_AND_FINISH();
4586 IEM_MC_END();
4587 }
4588 /* No memory operand. */
4589 else
4590 IEMOP_RAISE_INVALID_OPCODE_RET();
4591}
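
/* Note: like pextrw, the 0xc5 encoding is register only; the memory-destination form lives in the 0f3a map (pextrw Mw,Vdq,Ib). */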
4592
4593
4594/* Opcode VEX.F3.0F 0xc5 - invalid */
4595/* Opcode VEX.F2.0F 0xc5 - invalid */
4596
4597
4598#define VSHUFP_X(a_Instr) \
4599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4600 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4601 { \
4602 /* \
4603 * Register, register. \
4604 */ \
4605 if (pVCpu->iem.s.uVexLength) \
4606 { \
4607 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4608 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4609 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4610 IEM_MC_LOCAL(RTUINT256U, uDst); \
4611 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4612 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4613 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4614 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4615 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4616 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4617 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4618 IEM_MC_PREPARE_AVX_USAGE(); \
4619 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4620 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4621 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4622 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4623 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4624 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4625 IEM_MC_END(); \
4626 } \
4627 else \
4628 { \
4629 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4630 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4631 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4632 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4633 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4634 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4635 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4636 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4637 IEM_MC_PREPARE_AVX_USAGE(); \
4638 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4639 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4640 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4641 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4642 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4643 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4644 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4645 IEM_MC_END(); \
4646 } \
4647 } \
4648 else \
4649 { \
4650 /* \
4651 * Register, memory. \
4652 */ \
4653 if (pVCpu->iem.s.uVexLength) \
4654 { \
4655 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4656 IEM_MC_LOCAL(RTUINT256U, uDst); \
4657 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4658 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4660 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4661 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4662 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4664 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4665 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4666 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4667 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4668 IEM_MC_PREPARE_AVX_USAGE(); \
4669 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4670 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4671 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4672 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4673 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4674 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4675 IEM_MC_END(); \
4676 } \
4677 else \
4678 { \
4679 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4680 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4682 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4683 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4684 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4686 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4687 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4688 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4689 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4690 IEM_MC_PREPARE_AVX_USAGE(); \
4691 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4692 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4693 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4694 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4695 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4696 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4697 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4698 IEM_MC_END(); \
4699 } \
4700 } \
4701 (void)0
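/* Note: the trailing (void)0 above makes the VSHUFP_X() invocations below supply the closing semicolon themselves. */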
4702
4703/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4704FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4705{
 4706 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4707 VSHUFP_X(vshufps);
4708}
4709
4710
4711/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4712FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4713{
4714 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4715 VSHUFP_X(vshufpd);
4716}
4717#undef VSHUFP_X
4718
4719
4720/* Opcode VEX.F3.0F 0xc6 - invalid */
4721/* Opcode VEX.F2.0F 0xc6 - invalid */
4722
4723/* Opcode VEX.0F 0xc7 - invalid */
4724/* Opcode VEX.66.0F 0xc7 - invalid */
4725/* Opcode VEX.F3.0F 0xc7 - invalid */
4726/* Opcode VEX.F2.0F 0xc7 - invalid */
4727
4728/* Opcode VEX.0F 0xc8 - invalid */
4729/* Opcode VEX.0F 0xc9 - invalid */
4730/* Opcode VEX.0F 0xca - invalid */
4731/* Opcode VEX.0F 0xcb - invalid */
4732/* Opcode VEX.0F 0xcc - invalid */
4733/* Opcode VEX.0F 0xcd - invalid */
4734/* Opcode VEX.0F 0xce - invalid */
4735/* Opcode VEX.0F 0xcf - invalid */
4736
4737
4738/* Opcode VEX.0F 0xd0 - invalid */
4739/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4740FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4741/* Opcode VEX.F3.0F 0xd0 - invalid */
4742/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4743FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4744
4745/* Opcode VEX.0F 0xd1 - invalid */
4746/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
4747FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4748/* Opcode VEX.F3.0F 0xd1 - invalid */
4749/* Opcode VEX.F2.0F 0xd1 - invalid */
4750
4751/* Opcode VEX.0F 0xd2 - invalid */
4752/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4753FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4754/* Opcode VEX.F3.0F 0xd2 - invalid */
4755/* Opcode VEX.F2.0F 0xd2 - invalid */
4756
4757/* Opcode VEX.0F 0xd3 - invalid */
4758/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4759FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4760/* Opcode VEX.F3.0F 0xd3 - invalid */
4761/* Opcode VEX.F2.0F 0xd3 - invalid */
4762
4763/* Opcode VEX.0F 0xd4 - invalid */
4764
4765
4766/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4767FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4768{
4769 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4770 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4771 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4772}
4773
4774
4775/* Opcode VEX.F3.0F 0xd4 - invalid */
4776/* Opcode VEX.F2.0F 0xd4 - invalid */
4777
4778/* Opcode VEX.0F 0xd5 - invalid */
4779
4780
4781/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4782FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4783{
4784 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4785 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4786 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4787}
4788
4789
4790/* Opcode VEX.F3.0F 0xd5 - invalid */
4791/* Opcode VEX.F2.0F 0xd5 - invalid */
4792
4793/* Opcode VEX.0F 0xd6 - invalid */
4794
4795/**
4796 * @opcode 0xd6
4797 * @oppfx 0x66
4798 * @opcpuid avx
4799 * @opgroup og_avx_pcksclr_datamove
4800 * @opxcpttype none
4801 * @optest op1=-1 op2=2 -> op1=2
4802 * @optest op1=0 op2=-42 -> op1=-42
4803 */
4804FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4805{
4806 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4808 if (IEM_IS_MODRM_REG_MODE(bRm))
4809 {
4810 /*
4811 * Register, register.
4812 */
4813 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4814 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4815
4816 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4817 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4818
4819 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4820 IEM_GET_MODRM_REG(pVCpu, bRm));
4821 IEM_MC_ADVANCE_RIP_AND_FINISH();
4822 IEM_MC_END();
4823 }
4824 else
4825 {
4826 /*
4827 * Memory, register.
4828 */
4829 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4830 IEM_MC_LOCAL(uint64_t, uSrc);
4831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4832
4833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4834 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4835 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4836 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4837
4838 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4839 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4840
4841 IEM_MC_ADVANCE_RIP_AND_FINISH();
4842 IEM_MC_END();
4843 }
4844}
4845
4846/* Opcode VEX.F3.0F 0xd6 - invalid */
4847/* Opcode VEX.F2.0F 0xd6 - invalid */
4848
4849
4850/* Opcode VEX.0F 0xd7 - invalid */
4851
4852/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4853FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4854{
4855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 4856 /* Docs say register only. */
4857 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4858 {
 4859 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4860 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4861 if (pVCpu->iem.s.uVexLength)
4862 {
4863 IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4864 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4865 IEM_MC_ARG(uint64_t *, puDst, 0);
4866 IEM_MC_LOCAL(RTUINT256U, uSrc);
4867 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4868 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4869 IEM_MC_PREPARE_AVX_USAGE();
4870 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4871 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4872 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4873 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4874 IEM_MC_ADVANCE_RIP_AND_FINISH();
4875 IEM_MC_END();
4876 }
4877 else
4878 {
4879 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4880 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4881 IEM_MC_ARG(uint64_t *, puDst, 0);
4882 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4883 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4884 IEM_MC_PREPARE_AVX_USAGE();
4885 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4886 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4887 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4888 IEM_MC_ADVANCE_RIP_AND_FINISH();
4889 IEM_MC_END();
4890 }
4891 }
4892 else
4893 IEMOP_RAISE_INVALID_OPCODE_RET();
4894}
4895
4896
4897/* Opcode VEX.F3.0F 0xd7 - invalid */
4898/* Opcode VEX.F2.0F 0xd7 - invalid */
4899
4900
4901/* Opcode VEX.0F 0xd8 - invalid */
4902
4903/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
4904FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
4905{
4906 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4907 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
4908 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4909}
4910
4911
4912/* Opcode VEX.F3.0F 0xd8 - invalid */
4913/* Opcode VEX.F2.0F 0xd8 - invalid */
4914
4915/* Opcode VEX.0F 0xd9 - invalid */
4916
4917
4918/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4919FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
4920{
4921 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4922 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
4923 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4924}
4925
4926
4927/* Opcode VEX.F3.0F 0xd9 - invalid */
4928/* Opcode VEX.F2.0F 0xd9 - invalid */
4929
4930/* Opcode VEX.0F 0xda - invalid */
4931
4932
4933/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4934FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4935{
4936 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4937 IEMOPMEDIAF3_INIT_VARS(vpminub);
4938 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4939}
4940
4941
4942/* Opcode VEX.F3.0F 0xda - invalid */
4943/* Opcode VEX.F2.0F 0xda - invalid */
4944
4945/* Opcode VEX.0F 0xdb - invalid */
4946
4947
4948/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4949FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4950{
4951 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4952 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4953 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4954}
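
/* vpand (and vpandn below) uses the pre-instantiated global function tables (g_iemAImpl_vpand*) instead of building local s_Host/s_Fallback copies with IEMOPMEDIAF3_INIT_VARS. */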
4955
4956
4957/* Opcode VEX.F3.0F 0xdb - invalid */
4958/* Opcode VEX.F2.0F 0xdb - invalid */
4959
4960/* Opcode VEX.0F 0xdc - invalid */
4961
4962
4963/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4964FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
4965{
4966 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4967 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
4968 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4969}
4970
4971
4972/* Opcode VEX.F3.0F 0xdc - invalid */
4973/* Opcode VEX.F2.0F 0xdc - invalid */
4974
4975/* Opcode VEX.0F 0xdd - invalid */
4976
4977
4978/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4979FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
4980{
4981 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4982 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
4983 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4984}
4985
4986
4987/* Opcode VEX.F3.0F 0xdd - invalid */
4988/* Opcode VEX.F2.0F 0xdd - invalid */
4989
4990/* Opcode VEX.0F 0xde - invalid */
4991
4992
4993/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4994FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4995{
4996 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4997 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4998 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4999}
5000
5001
5002/* Opcode VEX.F3.0F 0xde - invalid */
5003/* Opcode VEX.F2.0F 0xde - invalid */
5004
5005/* Opcode VEX.0F 0xdf - invalid */
5006
5007
5008/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5009FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5010{
5011 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5012 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5013 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5014}
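
/*
 * Illustrative sketch: note the operand order of vpandn. The first source
 * (the Hx/VEX.vvvv operand) is the one that gets complemented, i.e.
 * dst = ~src1 & src2. The scalar helper below is hypothetical, for
 * exposition only.
 */
#if 0 /* example only, not built */
static uint64_t iemExamplePAndN(uint64_t uSrc1, uint64_t uSrc2)
{
    return ~uSrc1 & uSrc2; /* complement the first source, then AND */
}
#endif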


/* Opcode VEX.F3.0F 0xdf - invalid */
/* Opcode VEX.F2.0F 0xdf - invalid */

/* Opcode VEX.0F 0xe0 - invalid */


/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xe0 - invalid */
/* Opcode VEX.F2.0F 0xe0 - invalid */

/* Opcode VEX.0F 0xe1 - invalid */
/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xe1 - invalid */
/* Opcode VEX.F2.0F 0xe1 - invalid */

/* Opcode VEX.0F 0xe2 - invalid */
/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe2 - invalid */
/* Opcode VEX.F2.0F 0xe2 - invalid */

/* Opcode VEX.0F 0xe3 - invalid */


/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
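
/*
 * Illustrative sketch: vpavgb/vpavgw compute a rounding unsigned average,
 * (a + b + 1) >> 1, without losing the carry. The byte helper below is
 * hypothetical, for exposition only.
 */
#if 0 /* example only, not built */
static uint8_t iemExamplePAvgB(uint8_t uSrc1, uint8_t uSrc2)
{
    return (uint8_t)(((unsigned)uSrc1 + uSrc2 + 1) >> 1); /* widen so the carry survives */
}
#endif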


/* Opcode VEX.F3.0F 0xe3 - invalid */
/* Opcode VEX.F2.0F 0xe3 - invalid */

/* Opcode VEX.0F 0xe4 - invalid */


/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xe4 - invalid */
/* Opcode VEX.F2.0F 0xe4 - invalid */

/* Opcode VEX.0F 0xe5 - invalid */


/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
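
/*
 * Illustrative sketch: vpmulhuw/vpmulhw keep only the high 16 bits of the
 * 32-bit word product (unsigned vs. signed). Hypothetical unsigned helper,
 * for exposition only:
 */
#if 0 /* example only, not built */
static uint16_t iemExamplePMulHUW(uint16_t uSrc1, uint16_t uSrc2)
{
    return (uint16_t)(((uint32_t)uSrc1 * uSrc2) >> 16); /* discard the low half */
}
#endif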


/* Opcode VEX.F3.0F 0xe5 - invalid */
/* Opcode VEX.F2.0F 0xe5 - invalid */

/* Opcode VEX.0F 0xe6 - invalid */
/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);


/* Opcode VEX.0F 0xe7 - invalid */

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2  -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
{
    IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength == 0)
        {
            /*
             * 128-bit: Memory, register.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * 256-bit: Memory, register.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    /**
     * @opdone
     * @opmnemonic  udvex660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
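
/*
 * Illustrative sketch: unlike most VEX-encoded loads/stores, vmovntdq demands
 * natural alignment (16 or 32 bytes, matching VEX.L), which is why the stores
 * above go through the _ALIGN_ variants. Hypothetical check, for exposition
 * only:
 */
#if 0 /* example only, not built */
static bool iemExampleIsNtStoreAligned(RTGCPTR GCPtrEff, uint32_t cbOp /* 16 or 32 */)
{
    return (GCPtrEff & (cbOp - 1)) == 0; /* misaligned non-temporal stores fault */
}
#endif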

/* Opcode VEX.F3.0F 0xe7 - invalid */
/* Opcode VEX.F2.0F 0xe7 - invalid */


/* Opcode VEX.0F 0xe8 - invalid */


/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xe8 - invalid */
/* Opcode VEX.F2.0F 0xe8 - invalid */

/* Opcode VEX.0F 0xe9 - invalid */


/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
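
/*
 * Illustrative sketch: vpsubsb/vpsubsw saturate signed results to the
 * representable range instead of wrapping (the vpaddsb/vpaddsw pair below is
 * analogous). The byte helper is hypothetical, for exposition only.
 */
#if 0 /* example only, not built */
static int8_t iemExamplePSubSB(int8_t iSrc1, int8_t iSrc2)
{
    int iRes = (int)iSrc1 - iSrc2; /* widen to avoid overflow in the subtraction */
    return (int8_t)(iRes > 127 ? 127 : iRes < -128 ? -128 : iRes);
}
#endif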


/* Opcode VEX.F3.0F 0xe9 - invalid */
/* Opcode VEX.F2.0F 0xe9 - invalid */

/* Opcode VEX.0F 0xea - invalid */


/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xea - invalid */
/* Opcode VEX.F2.0F 0xea - invalid */

/* Opcode VEX.0F 0xeb - invalid */


/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}


/* Opcode VEX.F3.0F 0xeb - invalid */
/* Opcode VEX.F2.0F 0xeb - invalid */

/* Opcode VEX.0F 0xec - invalid */


/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xec - invalid */
/* Opcode VEX.F2.0F 0xec - invalid */

/* Opcode VEX.0F 0xed - invalid */


/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xed - invalid */
/* Opcode VEX.F2.0F 0xed - invalid */

/* Opcode VEX.0F 0xee - invalid */


/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xee - invalid */
/* Opcode VEX.F2.0F 0xee - invalid */


/* Opcode VEX.0F 0xef - invalid */


/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}


/* Opcode VEX.F3.0F 0xef - invalid */
/* Opcode VEX.F2.0F 0xef - invalid */

/* Opcode VEX.0F 0xf0 - invalid */
/* Opcode VEX.66.0F 0xf0 - invalid */


/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
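
/*
 * Illustrative sketch: vlddqu is a load with no alignment restriction (note
 * the plain IEM_MC_FETCH_MEM_U128/U256 above, no _ALIGN_ variant), and its
 * register form is treated as invalid. Hypothetical byte-wise view of the
 * 128-bit case, for exposition only:
 */
#if 0 /* example only, not built */
static void iemExampleLddqu128(uint8_t *pabDst /* 16 bytes */, uint8_t const *pabSrc)
{
    for (unsigned i = 0; i < 16; i++) /* any source alignment is acceptable */
        pabDst[i] = pabSrc[i];
}
#endif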


/* Opcode VEX.0F 0xf1 - invalid */
/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/* Opcode VEX.F2.0F 0xf1 - invalid */

/* Opcode VEX.0F 0xf2 - invalid */
/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
/* Opcode VEX.F2.0F 0xf2 - invalid */

/* Opcode VEX.0F 0xf3 - invalid */
/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
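
/*
 * Illustrative sketch: for the vpsll family the shift count comes from the
 * low 64 bits of the second source, and any count >= the element width
 * zeroes the element rather than being masked. Hypothetical word helper,
 * for exposition only:
 */
#if 0 /* example only, not built */
static uint16_t iemExamplePSllW(uint16_t uSrc, uint64_t uCount)
{
    return uCount < 16 ? (uint16_t)(uSrc << uCount) : 0; /* oversized counts flush to 0 */
}
#endif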
/* Opcode VEX.F2.0F 0xf3 - invalid */

/* Opcode VEX.0F 0xf4 - invalid */


/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
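
/*
 * Illustrative sketch: vpmuludq multiplies the low (even) dword of each
 * qword lane, producing a full 64-bit product. Hypothetical one-lane view,
 * for exposition only:
 */
#if 0 /* example only, not built */
static uint64_t iemExamplePMulUDq(uint64_t uSrc1, uint64_t uSrc2)
{
    return (uint64_t)(uint32_t)uSrc1 * (uint32_t)uSrc2; /* low dwords only */
}
#endif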


/* Opcode VEX.F2.0F 0xf4 - invalid */

/* Opcode VEX.0F 0xf5 - invalid */
/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf5 - invalid */

/* Opcode VEX.0F 0xf6 - invalid */


/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
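
/*
 * Illustrative sketch: vpsadbw sums the absolute byte differences over each
 * 8-byte group into a 16-bit sum that is zero-extended into the destination
 * qword. Hypothetical helper for one group, for exposition only:
 */
#if 0 /* example only, not built */
static uint16_t iemExamplePSadBw(uint8_t const *pabSrc1, uint8_t const *pabSrc2)
{
    uint16_t uSum = 0;
    for (unsigned i = 0; i < 8; i++) /* max sum 8 * 255 = 2040, fits in 16 bits */
        uSum += pabSrc1[i] >= pabSrc2[i] ? pabSrc1[i] - pabSrc2[i] : pabSrc2[i] - pabSrc1[i];
    return uSum;
}
#endif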


/* Opcode VEX.F2.0F 0xf6 - invalid */

/* Opcode VEX.0F 0xf7 - invalid */
/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode VEX.F2.0F 0xf7 - invalid */

/* Opcode VEX.0F 0xf8 - invalid */


/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf8 - invalid */

/* Opcode VEX.0F 0xf9 - invalid */


/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf9 - invalid */

/* Opcode VEX.0F 0xfa - invalid */


/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfa - invalid */

/* Opcode VEX.0F 0xfb - invalid */


/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfb - invalid */

/* Opcode VEX.0F 0xfc - invalid */


/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfc - invalid */

/* Opcode VEX.0F 0xfd - invalid */


/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfd - invalid */

/* Opcode VEX.0F 0xfe - invalid */


/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
/** @todo testcase: vud0 */
    IEMOP_MNEMONIC(vud0, "vud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Intel decodes a ModR/M byte (and any addressing bytes) for UD0; AMD does not. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}



/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
 */
const PFNIEMOP g_apfnVexMap1[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */  IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */  iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */  iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */  iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */  iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */  iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */  iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */  iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */  IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */  iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */  iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */  iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */  iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */  iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */  iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */  iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */  iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */  iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */  iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */  iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */  iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */  iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */  iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */  iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */  iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */  iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */  iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */  IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */  iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */  iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */  iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */  iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */  iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */  iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_vud0) /* ?? */
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
/** @} */
