VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@29687

Last change on this file since 29687 was 29687, checked in by vboxsync, 15 years ago

Make sure the right descriptor attributes (0xf3) are loaded in V86 mode (VT-x is picky)

  • Property svn:eol-style set to native
File size: 276.6 KB
 
/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
 * a choice of LGPL license versions is made available with the language indicating
 * that LGPLv2 or any later version may be used, or where a choice of which version
 * of the LGPL is applied is otherwise unspecified.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifndef VBOX
#include <inttypes.h>
#include <signal.h>
#include <assert.h>
#endif /* !VBOX */

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST 1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef VBOX

/* Special/override code readers to hide patched code. */

uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

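/* Note (a reading of the code, not original commentary): the 16-bit and
   32-bit readers below are composed from single-byte ldub_code() reads so
   that every byte fetched during translation passes through the
   patch-aware reader above; reading a word or long directly from guest
   memory could otherwise return patched bytes. */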
uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */


typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int pvi;    /* CR4.PVI */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

#ifdef VBOX
static void gen_check_external_event();
#endif

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
#ifndef VBOX
            goto std_case;
#else
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
#endif
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
#ifndef VBOX
    std_case:
#endif
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef VBOX
DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
{
    /* Segments don't seem to get out of sync; if they do in fact, enable the code below. */
#ifdef FORCE_SEGMENT_SYNC
#if 1
    TCGv t0;

    /* Considering the poor quality of the TCG optimizer, it is better to call the helper directly. */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(t0, reg);
    tcg_gen_helper_0_1(helper_sync_seg, t0);
    tcg_temp_free(t0);
#else
    /* Our segments could be outdated, so check the newselector field to see if an update is really needed. */
    int skip_label;
    TCGv t0, a0;

    /* For segments other than GS this check is a waste of time; TCG is also unable to cope
       with this code for data/stack segments, as it expects cpu_T[0] to be alive. */
    if (reg != R_GS)
        return;

    if (keepA0)
    {
        /* we need to store the old cpu_A0 */
        a0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_mov_tl(a0, cpu_A0);
    }

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
    tcg_gen_andi_tl(t0, t0, VM_MASK);
    tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
    tcg_gen_movi_tl(t0, reg);

    tcg_gen_helper_0_1(helper_sync_seg, t0);

    tcg_temp_free(t0);

    gen_set_label(skip_label);
    if (keepA0)
    {
        tcg_gen_mov_tl(cpu_A0, a0);
        tcg_temp_free(a0);
    }
#endif /* 0 */
#endif /* FORCE_SEGMENT_SYNC */
}
#endif

#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}

/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

#ifdef VBOX
static void gen_check_external_event()
{
#if 1
    /** @todo: once TCG codegen improves, we may want to use the
       version from the #else branch below */
    tcg_gen_helper_0_0(helper_check_external_event);
#else
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    /* t0 = cpu_tmp0; */

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict the branch as taken */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_temp_free(t0);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
#endif
}

#if 0 /* unused code? */
static void gen_check_external_event2()
{
    tcg_gen_helper_0_0(helper_check_external_event);
}
#endif

#endif

#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
DECLINLINE(void) gen_update_eip(target_ulong pc)
{
    gen_jmp_im(pc);
#ifdef VBOX_DUMP_STATE
    tcg_gen_helper_0_0(helper_dump_state);
#endif
}

#endif

#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

#ifndef VBOX
static inline void gen_op_movl_T0_Dshift(int ot)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
#endif /* VBOX */
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

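/* Note (a reading of the code, not original commentary): gen_check_io()
   below emits up to two runtime checks before an I/O instruction: the
   TSS I/O permission bitmap check (via helper_check_iob/w/l) when
   running in protected mode with CPL > IOPL, or in vm86 mode; and the
   SVM I/O intercept check when HF_SVMI_MASK is set.  In both cases eip
   and cc_op are written back first so the helpers can raise a fault
   with consistent CPU state. */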
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if (s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}

#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
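    /* Note (a reading of the code, not original commentary): cc_table is
       indexed by cc_op and each CCTable entry holds two function pointers
       (compute_all, compute_c).  The shift below scales cc_op by
       sizeof(CCTable): 8 bytes (shift by 3) on 32-bit hosts, 16 bytes
       (shift by 4) on 64-bit hosts; the selected helper is then loaded
       and called indirectly. */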
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind: we generate jumps to current or next
   instruction */
#ifndef VBOX
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

#ifndef VBOX
#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ2(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, \
                                 target_ulong next_eip, \
                                 int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

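/* Note (a reading of the code, not original commentary): GEN_REPZ
   generates the plain REP iteration, terminated only by ECX reaching
   zero, while GEN_REPZ2 additionally tests ZF after each iteration via
   gen_jcc1(), as required by REPZ/REPNZ scas and cmps; the 'nz'
   argument selects between the REPZ and REPNZ termination conditions. */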
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

#ifndef VBOX
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#else /* VBOX */
DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#endif /* VBOX */
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
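    /* Note (a reading of the code, not original commentary): OF is
       approximated as the XOR of the operand's MSB before (t2) and after
       (t0) the rotate, shifted into the CC_O position (bit 11); CF is
       the bit rotated into the carry: the result's MSB for right
       rotates, its LSB for left rotates. */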
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}

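/* Note (a reading of the code, not original commentary): the
   rotate-through-carry helpers below are indexed as
   helper_rotc[ot + is_right * 4]: entries 0-3 are RCL for
   byte/word/long/quad, entries 4-7 are RCR; the quad variants are NULL
   on non-x86_64 builds via X86_64_ONLY(). */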
2123static void *helper_rotc[8] = {
2124 helper_rclb,
2125 helper_rclw,
2126 helper_rcll,
2127 X86_64_ONLY(helper_rclq),
2128 helper_rcrb,
2129 helper_rcrw,
2130 helper_rcrl,
2131 X86_64_ONLY(helper_rcrq),
2132};
2133
2134/* XXX: add faster immediate = 1 case */
2135static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2136 int is_right)
2137{
2138 int label1;
2139
2140 if (s->cc_op != CC_OP_DYNAMIC)
2141 gen_op_set_cc_op(s->cc_op);
2142
2143 /* load */
2144 if (op1 == OR_TMP0)
2145 gen_op_ld_T0_A0(ot + s->mem_index);
2146 else
2147 gen_op_mov_TN_reg(ot, 0, op1);
2148
2149 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2150 cpu_T[0], cpu_T[0], cpu_T[1]);
2151 /* store */
2152 if (op1 == OR_TMP0)
2153 gen_op_st_T0_A0(ot + s->mem_index);
2154 else
2155 gen_op_mov_reg_T0(ot, op1);
2156
2157 /* update eflags */
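    /* The rcl/rcr helper leaves the updated flags in cpu_cc_tmp, or -1
       when the count was zero; in that case the branch below skips the
       flag commit and the flags stay unchanged. */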
2158 label1 = gen_new_label();
2159 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2160
2161 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2162 tcg_gen_discard_tl(cpu_cc_dst);
2163 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2164
2165 gen_set_label(label1);
2166 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2167}
2168
2169/* XXX: add faster immediate case */
2170static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2171 int is_right)
2172{
2173 int label1, label2, data_bits;
2174 target_ulong mask;
2175 TCGv t0, t1, t2, a0;
2176
2177 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2178 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2179 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2180 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2181
2182 if (ot == OT_QUAD)
2183 mask = 0x3f;
2184 else
2185 mask = 0x1f;
2186
2187 /* load */
2188 if (op1 == OR_TMP0) {
2189 tcg_gen_mov_tl(a0, cpu_A0);
2190 gen_op_ld_v(ot + s->mem_index, t0, a0);
2191 } else {
2192 gen_op_mov_v_reg(ot, t0, op1);
2193 }
2194
2195 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2196
2197 tcg_gen_mov_tl(t1, cpu_T[1]);
2198 tcg_gen_mov_tl(t2, cpu_T3);
2199
2200 /* Must test zero case to avoid using undefined behaviour in TCG
2201 shifts. */
2202 label1 = gen_new_label();
2203 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2204
2205 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2206 if (ot == OT_WORD) {
2207 /* Note: we implement the Intel behaviour for shift count > 16 */
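            /* The 16-bit operand is concatenated with t1 into a 32-bit
               value so that the bits shifted in come from t1 and counts
               in the 17..31 range behave as on Intel hardware. */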
2208 if (is_right) {
2209 tcg_gen_andi_tl(t0, t0, 0xffff);
2210 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2211 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2212 tcg_gen_ext32u_tl(t0, t0);
2213
2214 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2215
2216 /* only needed if count > 16, but a run-time test would complicate the code */
2217 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2218 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2219
2220 tcg_gen_shr_tl(t0, t0, t2);
2221
2222 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2223 } else {
2224 /* XXX: not optimal */
2225 tcg_gen_andi_tl(t0, t0, 0xffff);
2226 tcg_gen_shli_tl(t1, t1, 16);
2227 tcg_gen_or_tl(t1, t1, t0);
2228 tcg_gen_ext32u_tl(t1, t1);
2229
2230 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2231 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2232 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2233 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2234
2235 tcg_gen_shl_tl(t0, t0, t2);
2236 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2237 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2238 tcg_gen_or_tl(t0, t0, t1);
2239 }
2240 } else {
2241 data_bits = 8 << ot;
2242 if (is_right) {
2243 if (ot == OT_LONG)
2244 tcg_gen_ext32u_tl(t0, t0);
2245
2246 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2247
2248 tcg_gen_shr_tl(t0, t0, t2);
2249 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2250 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2251 tcg_gen_or_tl(t0, t0, t1);
2252
2253 } else {
2254 if (ot == OT_LONG)
2255 tcg_gen_ext32u_tl(t1, t1);
2256
2257 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2258
2259 tcg_gen_shl_tl(t0, t0, t2);
2260 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2261 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2262 tcg_gen_or_tl(t0, t0, t1);
2263 }
2264 }
2265 tcg_gen_mov_tl(t1, cpu_tmp4);
2266
2267 gen_set_label(label1);
2268 /* store */
2269 if (op1 == OR_TMP0) {
2270 gen_op_st_v(ot + s->mem_index, t0, a0);
2271 } else {
2272 gen_op_mov_reg_v(ot, op1, t0);
2273 }
2274
2275 /* update eflags */
2276 if (s->cc_op != CC_OP_DYNAMIC)
2277 gen_op_set_cc_op(s->cc_op);
2278
2279 label2 = gen_new_label();
2280 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2281
2282 tcg_gen_mov_tl(cpu_cc_src, t1);
2283 tcg_gen_mov_tl(cpu_cc_dst, t0);
2284 if (is_right) {
2285 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2286 } else {
2287 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2288 }
2289 gen_set_label(label2);
2290 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2291
2292 tcg_temp_free(t0);
2293 tcg_temp_free(t1);
2294 tcg_temp_free(t2);
2295 tcg_temp_free(a0);
2296}
2297
2298static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2299{
2300 if (s != OR_TMP1)
2301 gen_op_mov_TN_reg(ot, 1, s);
2302 switch(op) {
2303 case OP_ROL:
2304 gen_rot_rm_T1(s1, ot, d, 0);
2305 break;
2306 case OP_ROR:
2307 gen_rot_rm_T1(s1, ot, d, 1);
2308 break;
2309 case OP_SHL:
2310 case OP_SHL1:
2311 gen_shift_rm_T1(s1, ot, d, 0, 0);
2312 break;
2313 case OP_SHR:
2314 gen_shift_rm_T1(s1, ot, d, 1, 0);
2315 break;
2316 case OP_SAR:
2317 gen_shift_rm_T1(s1, ot, d, 1, 1);
2318 break;
2319 case OP_RCL:
2320 gen_rotc_rm_T1(s1, ot, d, 0);
2321 break;
2322 case OP_RCR:
2323 gen_rotc_rm_T1(s1, ot, d, 1);
2324 break;
2325 }
2326}
2327
2328static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2329{
2330 switch(op) {
2331 case OP_SHL:
2332 case OP_SHL1:
2333 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2334 break;
2335 case OP_SHR:
2336 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2337 break;
2338 case OP_SAR:
2339 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2340 break;
2341 default:
2342 /* currently not optimized */
2343 gen_op_movl_T1_im(c);
2344 gen_shift(s1, op, ot, d, OR_TMP1);
2345 break;
2346 }
2347}
2348
2349static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2350{
2351 target_long disp;
2352 int havesib;
2353 int base;
2354 int index;
2355 int scale;
2356 int opreg;
2357 int mod, rm, code, override, must_add_seg;
2358
2359 override = s->override;
2360 must_add_seg = s->addseg;
2361 if (override >= 0)
2362 must_add_seg = 1;
2363 mod = (modrm >> 6) & 3;
2364 rm = modrm & 7;
2365
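        /* 32/64-bit addressing: parse the optional SIB byte and the
           displacement, then add the base register, the scaled index and,
           if required, the segment base to A0. The else branch below
           handles 16-bit addressing. */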
2366 if (s->aflag) {
2367
2368 havesib = 0;
2369 base = rm;
2370 index = 0;
2371 scale = 0;
2372
2373 if (base == 4) {
2374 havesib = 1;
2375 code = ldub_code(s->pc++);
2376 scale = (code >> 6) & 3;
2377 index = ((code >> 3) & 7) | REX_X(s);
2378 base = (code & 7);
2379 }
2380 base |= REX_B(s);
2381
2382 switch (mod) {
2383 case 0:
2384 if ((base & 7) == 5) {
2385 base = -1;
2386 disp = (int32_t)ldl_code(s->pc);
2387 s->pc += 4;
2388 if (CODE64(s) && !havesib) {
2389 disp += s->pc + s->rip_offset;
2390 }
2391 } else {
2392 disp = 0;
2393 }
2394 break;
2395 case 1:
2396 disp = (int8_t)ldub_code(s->pc++);
2397 break;
2398 default:
2399 case 2:
2400#ifdef VBOX
2401 disp = (int32_t)ldl_code(s->pc);
2402#else
2403 disp = ldl_code(s->pc);
2404#endif
2405 s->pc += 4;
2406 break;
2407 }
2408
2409 if (base >= 0) {
2410 /* for correct popl handling with esp */
2411 if (base == 4 && s->popl_esp_hack)
2412 disp += s->popl_esp_hack;
2413#ifdef TARGET_X86_64
2414 if (s->aflag == 2) {
2415 gen_op_movq_A0_reg(base);
2416 if (disp != 0) {
2417 gen_op_addq_A0_im(disp);
2418 }
2419 } else
2420#endif
2421 {
2422 gen_op_movl_A0_reg(base);
2423 if (disp != 0)
2424 gen_op_addl_A0_im(disp);
2425 }
2426 } else {
2427#ifdef TARGET_X86_64
2428 if (s->aflag == 2) {
2429 gen_op_movq_A0_im(disp);
2430 } else
2431#endif
2432 {
2433 gen_op_movl_A0_im(disp);
2434 }
2435 }
2436 /* XXX: index == 4 is always invalid */
2437 if (havesib && (index != 4 || scale != 0)) {
2438#ifdef TARGET_X86_64
2439 if (s->aflag == 2) {
2440 gen_op_addq_A0_reg_sN(scale, index);
2441 } else
2442#endif
2443 {
2444 gen_op_addl_A0_reg_sN(scale, index);
2445 }
2446 }
2447 if (must_add_seg) {
2448 if (override < 0) {
2449 if (base == R_EBP || base == R_ESP)
2450 override = R_SS;
2451 else
2452 override = R_DS;
2453 }
2454#ifdef TARGET_X86_64
2455 if (s->aflag == 2) {
2456 gen_op_addq_A0_seg(override);
2457 } else
2458#endif
2459 {
2460 gen_op_addl_A0_seg(override);
2461 }
2462 }
2463 } else {
2464 switch (mod) {
2465 case 0:
2466 if (rm == 6) {
2467 disp = lduw_code(s->pc);
2468 s->pc += 2;
2469 gen_op_movl_A0_im(disp);
2470 rm = 0; /* avoid SS override */
2471 goto no_rm;
2472 } else {
2473 disp = 0;
2474 }
2475 break;
2476 case 1:
2477 disp = (int8_t)ldub_code(s->pc++);
2478 break;
2479 default:
2480 case 2:
2481 disp = lduw_code(s->pc);
2482 s->pc += 2;
2483 break;
2484 }
2485 switch(rm) {
2486 case 0:
2487 gen_op_movl_A0_reg(R_EBX);
2488 gen_op_addl_A0_reg_sN(0, R_ESI);
2489 break;
2490 case 1:
2491 gen_op_movl_A0_reg(R_EBX);
2492 gen_op_addl_A0_reg_sN(0, R_EDI);
2493 break;
2494 case 2:
2495 gen_op_movl_A0_reg(R_EBP);
2496 gen_op_addl_A0_reg_sN(0, R_ESI);
2497 break;
2498 case 3:
2499 gen_op_movl_A0_reg(R_EBP);
2500 gen_op_addl_A0_reg_sN(0, R_EDI);
2501 break;
2502 case 4:
2503 gen_op_movl_A0_reg(R_ESI);
2504 break;
2505 case 5:
2506 gen_op_movl_A0_reg(R_EDI);
2507 break;
2508 case 6:
2509 gen_op_movl_A0_reg(R_EBP);
2510 break;
2511 default:
2512 case 7:
2513 gen_op_movl_A0_reg(R_EBX);
2514 break;
2515 }
2516 if (disp != 0)
2517 gen_op_addl_A0_im(disp);
2518 gen_op_andl_A0_ffff();
2519 no_rm:
2520 if (must_add_seg) {
2521 if (override < 0) {
2522 if (rm == 2 || rm == 3 || rm == 6)
2523 override = R_SS;
2524 else
2525 override = R_DS;
2526 }
2527 gen_op_addl_A0_seg(override);
2528 }
2529 }
2530
2531 opreg = OR_A0;
2532 disp = 0;
2533 *reg_ptr = opreg;
2534 *offset_ptr = disp;
2535}
2536
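/* Decode and skip the memory operand of a modrm byte without generating
   any code; used for instructions that must decode but ignore their
   operand, e.g. the multi-byte NOP forms. */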
2537static void gen_nop_modrm(DisasContext *s, int modrm)
2538{
2539 int mod, rm, base, code;
2540
2541 mod = (modrm >> 6) & 3;
2542 if (mod == 3)
2543 return;
2544 rm = modrm & 7;
2545
2546 if (s->aflag) {
2547
2548 base = rm;
2549
2550 if (base == 4) {
2551 code = ldub_code(s->pc++);
2552 base = (code & 7);
2553 }
2554
2555 switch (mod) {
2556 case 0:
2557 if (base == 5) {
2558 s->pc += 4;
2559 }
2560 break;
2561 case 1:
2562 s->pc++;
2563 break;
2564 default:
2565 case 2:
2566 s->pc += 4;
2567 break;
2568 }
2569 } else {
2570 switch (mod) {
2571 case 0:
2572 if (rm == 6) {
2573 s->pc += 2;
2574 }
2575 break;
2576 case 1:
2577 s->pc++;
2578 break;
2579 default:
2580 case 2:
2581 s->pc += 2;
2582 break;
2583 }
2584 }
2585}
2586
2587/* used for LEA and MOV AX, mem */
2588static void gen_add_A0_ds_seg(DisasContext *s)
2589{
2590 int override, must_add_seg;
2591 must_add_seg = s->addseg;
2592 override = R_DS;
2593 if (s->override >= 0) {
2594 override = s->override;
2595 must_add_seg = 1;
2596 } else {
2597 override = R_DS;
2598 }
2599 if (must_add_seg) {
2600#ifdef TARGET_X86_64
2601 if (CODE64(s)) {
2602 gen_op_addq_A0_seg(override);
2603 } else
2604#endif
2605 {
2606 gen_op_addl_A0_seg(override);
2607 }
2608 }
2609}
2610
2611/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2612 OR_TMP0 */
2613static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2614{
2615 int mod, rm, opreg, disp;
2616
2617 mod = (modrm >> 6) & 3;
2618 rm = (modrm & 7) | REX_B(s);
2619 if (mod == 3) {
2620 if (is_store) {
2621 if (reg != OR_TMP0)
2622 gen_op_mov_TN_reg(ot, 0, reg);
2623 gen_op_mov_reg_T0(ot, rm);
2624 } else {
2625 gen_op_mov_TN_reg(ot, 0, rm);
2626 if (reg != OR_TMP0)
2627 gen_op_mov_reg_T0(ot, reg);
2628 }
2629 } else {
2630 gen_lea_modrm(s, modrm, &opreg, &disp);
2631 if (is_store) {
2632 if (reg != OR_TMP0)
2633 gen_op_mov_TN_reg(ot, 0, reg);
2634 gen_op_st_T0_A0(ot + s->mem_index);
2635 } else {
2636 gen_op_ld_T0_A0(ot + s->mem_index);
2637 if (reg != OR_TMP0)
2638 gen_op_mov_reg_T0(ot, reg);
2639 }
2640 }
2641}
2642
2643#ifndef VBOX
2644static inline uint32_t insn_get(DisasContext *s, int ot)
2645#else /* VBOX */
2646DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2647#endif /* VBOX */
2648{
2649 uint32_t ret;
2650
2651 switch(ot) {
2652 case OT_BYTE:
2653 ret = ldub_code(s->pc);
2654 s->pc++;
2655 break;
2656 case OT_WORD:
2657 ret = lduw_code(s->pc);
2658 s->pc += 2;
2659 break;
2660 default:
2661 case OT_LONG:
2662 ret = ldl_code(s->pc);
2663 s->pc += 4;
2664 break;
2665 }
2666 return ret;
2667}
2668
2669#ifndef VBOX
2670static inline int insn_const_size(unsigned int ot)
2671#else /* VBOX */
2672DECLINLINE(int) insn_const_size(unsigned int ot)
2673#endif /* VBOX */
2674{
2675 if (ot <= OT_LONG)
2676 return 1 << ot;
2677 else
2678 return 4;
2679}
2680
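/* Jump to eip: when the target lies within the page(s) already covered by
   this TB we can chain directly with goto_tb, otherwise the TB is ended
   and the jump is resolved at run time. */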
2681#ifndef VBOX
2682static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2683#else /* VBOX */
2684DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2685#endif /* VBOX */
2686{
2687 TranslationBlock *tb;
2688 target_ulong pc;
2689
2690 pc = s->cs_base + eip;
2691 tb = s->tb;
2692 /* NOTE: we handle the case where the TB spans two pages here */
2693 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2694 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2695#ifdef VBOX
2696 gen_check_external_event(s);
2697#endif /* VBOX */
2698 /* jump to same page: we can use a direct jump */
2699 tcg_gen_goto_tb(tb_num);
2700 gen_jmp_im(eip);
2701 tcg_gen_exit_tb((long)tb + tb_num);
2702 } else {
2703 /* jump to another page: currently not optimized */
2704 gen_jmp_im(eip);
2705 gen_eob(s);
2706 }
2707}
2708
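/* Conditional jump to 'val' with fall-through to 'next_eip'. When TB
   chaining is allowed (jmp_opt), both edges become goto_tb exits;
   otherwise EIP is updated on each edge and the block is ended. */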
2709#ifndef VBOX
2710static inline void gen_jcc(DisasContext *s, int b,
2711#else /* VBOX */
2712DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2713#endif /* VBOX */
2714 target_ulong val, target_ulong next_eip)
2715{
2716 int l1, l2, cc_op;
2717
2718 cc_op = s->cc_op;
2719 if (s->cc_op != CC_OP_DYNAMIC) {
2720 gen_op_set_cc_op(s->cc_op);
2721 s->cc_op = CC_OP_DYNAMIC;
2722 }
2723 if (s->jmp_opt) {
2724 l1 = gen_new_label();
2725 gen_jcc1(s, cc_op, b, l1);
2726
2727 gen_goto_tb(s, 0, next_eip);
2728
2729 gen_set_label(l1);
2730 gen_goto_tb(s, 1, val);
2731 s->is_jmp = 3;
2732 } else {
2733
2734 l1 = gen_new_label();
2735 l2 = gen_new_label();
2736 gen_jcc1(s, cc_op, b, l1);
2737
2738 gen_jmp_im(next_eip);
2739 tcg_gen_br(l2);
2740
2741 gen_set_label(l1);
2742 gen_jmp_im(val);
2743 gen_set_label(l2);
2744 gen_eob(s);
2745 }
2746}
2747
2748static void gen_setcc(DisasContext *s, int b)
2749{
2750 int inv, jcc_op, l1;
2751 TCGv t0;
2752
2753 if (is_fast_jcc_case(s, b)) {
2754 /* nominal case: we use a jump */
2755 /* XXX: make it faster by adding new instructions in TCG */
2756 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2757 tcg_gen_movi_tl(t0, 0);
2758 l1 = gen_new_label();
2759 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2760 tcg_gen_movi_tl(t0, 1);
2761 gen_set_label(l1);
2762 tcg_gen_mov_tl(cpu_T[0], t0);
2763 tcg_temp_free(t0);
2764 } else {
2765 /* slow case: it is more efficient not to generate a jump,
2766 although it is questionable whether this optimization is
2767 worthwhile */
2768 inv = b & 1;
2769 jcc_op = (b >> 1) & 7;
2770 gen_setcc_slow_T0(s, jcc_op);
2771 if (inv) {
2772 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2773 }
2774 }
2775}
2776
2777#ifndef VBOX
2778static inline void gen_op_movl_T0_seg(int seg_reg)
2779#else /* VBOX */
2780DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2781#endif /* VBOX */
2782{
2783 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2784 offsetof(CPUX86State,segs[seg_reg].selector));
2785}
2786
2787#ifndef VBOX
2788static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2789#else /* VBOX */
2790DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2791#endif /* VBOX */
2792{
2793 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2794 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2795 offsetof(CPUX86State,segs[seg_reg].selector));
2796 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2797 tcg_gen_st_tl(cpu_T[0], cpu_env,
2798 offsetof(CPUX86State,segs[seg_reg].base));
2799#ifdef VBOX
2800 /* flags must be 0xf3; expand-up read/write accessed data segment with DPL=3. (VT-x) */
2801 unsigned flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK | DESC_A_MASK;
2802 flags |= (3 << DESC_DPL_SHIFT);
2803
2804 gen_op_movl_T0_im(flags);
2805 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2806
2807 /* Set the limit to 0xffff. */
2808 gen_op_movl_T0_im(0xffff);
2809 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2810#endif
2811}
2812
2813 /* move T0 to seg_reg and determine whether the CPU state may change.
2814 Never call this function with seg_reg == R_CS */
2815static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2816{
2817 if (s->pe && !s->vm86) {
2818 /* XXX: optimize by finding processor state dynamically */
2819 if (s->cc_op != CC_OP_DYNAMIC)
2820 gen_op_set_cc_op(s->cc_op);
2821 gen_jmp_im(cur_eip);
2822 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2823 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2824 /* abort translation because the addseg value may change or
2825 because ss32 may change. For R_SS, translation must always
2826 stop as special handling is needed to disable hardware
2827 interrupts for the next instruction */
2828 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2829 s->is_jmp = 3;
2830 } else {
2831 gen_op_movl_seg_T0_vm(seg_reg);
2832 if (seg_reg == R_SS)
2833 s->is_jmp = 3;
2834 }
2835}
2836
2837#ifndef VBOX
2838static inline int svm_is_rep(int prefixes)
2839#else /* VBOX */
2840DECLINLINE(int) svm_is_rep(int prefixes)
2841#endif /* VBOX */
2842{
2843 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2844}
2845
2846#ifndef VBOX
2847static inline void
2848#else /* VBOX */
2849DECLINLINE(void)
2850#endif /* VBOX */
2851gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2852 uint32_t type, uint64_t param)
2853{
2854 /* no SVM activated; fast case */
2855 if (likely(!(s->flags & HF_SVMI_MASK)))
2856 return;
2857 if (s->cc_op != CC_OP_DYNAMIC)
2858 gen_op_set_cc_op(s->cc_op);
2859 gen_jmp_im(pc_start - s->cs_base);
2860 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2861 tcg_const_i32(type), tcg_const_i64(param));
2862}
2863
2864#ifndef VBOX
2865static inline void
2866#else /* VBOX */
2867DECLINLINE(void)
2868#endif
2869gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2870{
2871 gen_svm_check_intercept_param(s, pc_start, type, 0);
2872}
2873
2874#ifndef VBOX
2875static inline void gen_stack_update(DisasContext *s, int addend)
2876#else /* VBOX */
2877DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2878#endif /* VBOX */
2879{
2880#ifdef TARGET_X86_64
2881 if (CODE64(s)) {
2882 gen_op_add_reg_im(2, R_ESP, addend);
2883 } else
2884#endif
2885 if (s->ss32) {
2886 gen_op_add_reg_im(1, R_ESP, addend);
2887 } else {
2888 gen_op_add_reg_im(0, R_ESP, addend);
2889 }
2890}
2891
2892/* generate a push. It depends on ss32, addseg and dflag */
2893static void gen_push_T0(DisasContext *s)
2894{
2895#ifdef TARGET_X86_64
2896 if (CODE64(s)) {
2897 gen_op_movq_A0_reg(R_ESP);
2898 if (s->dflag) {
2899 gen_op_addq_A0_im(-8);
2900 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2901 } else {
2902 gen_op_addq_A0_im(-2);
2903 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2904 }
2905 gen_op_mov_reg_A0(2, R_ESP);
2906 } else
2907#endif
2908 {
2909 gen_op_movl_A0_reg(R_ESP);
2910 if (!s->dflag)
2911 gen_op_addl_A0_im(-2);
2912 else
2913 gen_op_addl_A0_im(-4);
2914 if (s->ss32) {
2915 if (s->addseg) {
2916 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2917 gen_op_addl_A0_seg(R_SS);
2918 }
2919 } else {
2920 gen_op_andl_A0_ffff();
2921 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2922 gen_op_addl_A0_seg(R_SS);
2923 }
2924 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2925 if (s->ss32 && !s->addseg)
2926 gen_op_mov_reg_A0(1, R_ESP);
2927 else
2928 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2929 }
2930}
2931
2932/* generate a push. It depends on ss32, addseg and dflag */
2933/* slower version for T1, only used for call Ev */
2934static void gen_push_T1(DisasContext *s)
2935{
2936#ifdef TARGET_X86_64
2937 if (CODE64(s)) {
2938 gen_op_movq_A0_reg(R_ESP);
2939 if (s->dflag) {
2940 gen_op_addq_A0_im(-8);
2941 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2942 } else {
2943 gen_op_addq_A0_im(-2);
2944 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2945 }
2946 gen_op_mov_reg_A0(2, R_ESP);
2947 } else
2948#endif
2949 {
2950 gen_op_movl_A0_reg(R_ESP);
2951 if (!s->dflag)
2952 gen_op_addl_A0_im(-2);
2953 else
2954 gen_op_addl_A0_im(-4);
2955 if (s->ss32) {
2956 if (s->addseg) {
2957 gen_op_addl_A0_seg(R_SS);
2958 }
2959 } else {
2960 gen_op_andl_A0_ffff();
2961 gen_op_addl_A0_seg(R_SS);
2962 }
2963 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2964
2965 if (s->ss32 && !s->addseg)
2966 gen_op_mov_reg_A0(1, R_ESP);
2967 else
2968 gen_stack_update(s, (-2) << s->dflag);
2969 }
2970}
2971
2972 /* a two-step pop is necessary for precise exceptions */
2973static void gen_pop_T0(DisasContext *s)
2974{
2975#ifdef TARGET_X86_64
2976 if (CODE64(s)) {
2977 gen_op_movq_A0_reg(R_ESP);
2978 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2979 } else
2980#endif
2981 {
2982 gen_op_movl_A0_reg(R_ESP);
2983 if (s->ss32) {
2984 if (s->addseg)
2985 gen_op_addl_A0_seg(R_SS);
2986 } else {
2987 gen_op_andl_A0_ffff();
2988 gen_op_addl_A0_seg(R_SS);
2989 }
2990 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2991 }
2992}
2993
2994static void gen_pop_update(DisasContext *s)
2995{
2996#ifdef TARGET_X86_64
2997 if (CODE64(s) && s->dflag) {
2998 gen_stack_update(s, 8);
2999 } else
3000#endif
3001 {
3002 gen_stack_update(s, 2 << s->dflag);
3003 }
3004}
3005
3006static void gen_stack_A0(DisasContext *s)
3007{
3008 gen_op_movl_A0_reg(R_ESP);
3009 if (!s->ss32)
3010 gen_op_andl_A0_ffff();
3011 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3012 if (s->addseg)
3013 gen_op_addl_A0_seg(R_SS);
3014}
3015
3016 /* NOTE: wrap-around in 16-bit mode is not fully handled */
3017static void gen_pusha(DisasContext *s)
3018{
3019 int i;
3020 gen_op_movl_A0_reg(R_ESP);
3021 gen_op_addl_A0_im(-16 << s->dflag);
3022 if (!s->ss32)
3023 gen_op_andl_A0_ffff();
3024 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3025 if (s->addseg)
3026 gen_op_addl_A0_seg(R_SS);
3027 for(i = 0;i < 8; i++) {
3028 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3029 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3030 gen_op_addl_A0_im(2 << s->dflag);
3031 }
3032 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3033}
3034
3035 /* NOTE: wrap-around in 16-bit mode is not fully handled */
3036static void gen_popa(DisasContext *s)
3037{
3038 int i;
3039 gen_op_movl_A0_reg(R_ESP);
3040 if (!s->ss32)
3041 gen_op_andl_A0_ffff();
3042 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3043 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3044 if (s->addseg)
3045 gen_op_addl_A0_seg(R_SS);
3046 for(i = 0;i < 8; i++) {
3047 /* ESP is not reloaded */
3048 if (i != 3) {
3049 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3050 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3051 }
3052 gen_op_addl_A0_im(2 << s->dflag);
3053 }
3054 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3055}
3056
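/* ENTER: push EBP, copy 'level' outer frame pointers (the level is
   masked mod 32 as on hardware) and reserve esp_addend bytes for
   locals. */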
3057static void gen_enter(DisasContext *s, int esp_addend, int level)
3058{
3059 int ot, opsize;
3060
3061 level &= 0x1f;
3062#ifdef TARGET_X86_64
3063 if (CODE64(s)) {
3064 ot = s->dflag ? OT_QUAD : OT_WORD;
3065 opsize = 1 << ot;
3066
3067 gen_op_movl_A0_reg(R_ESP);
3068 gen_op_addq_A0_im(-opsize);
3069 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3070
3071 /* push bp */
3072 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3073 gen_op_st_T0_A0(ot + s->mem_index);
3074 if (level) {
3075 /* XXX: must save state */
3076 tcg_gen_helper_0_3(helper_enter64_level,
3077 tcg_const_i32(level),
3078 tcg_const_i32((ot == OT_QUAD)),
3079 cpu_T[1]);
3080 }
3081 gen_op_mov_reg_T1(ot, R_EBP);
3082 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3083 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3084 } else
3085#endif
3086 {
3087 ot = s->dflag + OT_WORD;
3088 opsize = 2 << s->dflag;
3089
3090 gen_op_movl_A0_reg(R_ESP);
3091 gen_op_addl_A0_im(-opsize);
3092 if (!s->ss32)
3093 gen_op_andl_A0_ffff();
3094 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3095 if (s->addseg)
3096 gen_op_addl_A0_seg(R_SS);
3097 /* push bp */
3098 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3099 gen_op_st_T0_A0(ot + s->mem_index);
3100 if (level) {
3101 /* XXX: must save state */
3102 tcg_gen_helper_0_3(helper_enter_level,
3103 tcg_const_i32(level),
3104 tcg_const_i32(s->dflag),
3105 cpu_T[1]);
3106 }
3107 gen_op_mov_reg_T1(ot, R_EBP);
3108 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3109 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3110 }
3111}
3112
3113static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3114{
3115 if (s->cc_op != CC_OP_DYNAMIC)
3116 gen_op_set_cc_op(s->cc_op);
3117 gen_jmp_im(cur_eip);
3118 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3119 s->is_jmp = 3;
3120}
3121
3122/* an interrupt is different from an exception because of the
3123 privilege checks */
3124static void gen_interrupt(DisasContext *s, int intno,
3125 target_ulong cur_eip, target_ulong next_eip)
3126{
3127 if (s->cc_op != CC_OP_DYNAMIC)
3128 gen_op_set_cc_op(s->cc_op);
3129 gen_jmp_im(cur_eip);
3130 tcg_gen_helper_0_2(helper_raise_interrupt,
3131 tcg_const_i32(intno),
3132 tcg_const_i32(next_eip - cur_eip));
3133 s->is_jmp = 3;
3134}
3135
3136static void gen_debug(DisasContext *s, target_ulong cur_eip)
3137{
3138 if (s->cc_op != CC_OP_DYNAMIC)
3139 gen_op_set_cc_op(s->cc_op);
3140 gen_jmp_im(cur_eip);
3141 tcg_gen_helper_0_0(helper_debug);
3142 s->is_jmp = 3;
3143}
3144
3145 /* generate a generic end of block. A trace exception is also
3146 generated if needed */
3147static void gen_eob(DisasContext *s)
3148{
3149 if (s->cc_op != CC_OP_DYNAMIC)
3150 gen_op_set_cc_op(s->cc_op);
3151 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3152 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3153 }
3154
3155#ifdef VBOX
3156 gen_check_external_event(s);
3157#endif /* VBOX */
3158
3159 if (s->singlestep_enabled) {
3160 tcg_gen_helper_0_0(helper_debug);
3161 } else if (s->tf) {
3162 tcg_gen_helper_0_0(helper_single_step);
3163 } else {
3164 tcg_gen_exit_tb(0);
3165 }
3166 s->is_jmp = 3;
3167}
3168
3169 /* generate a jump to eip. No segment change must happen beforehand,
3170 as the next block may be reached by a direct jump */
3171static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3172{
3173 if (s->jmp_opt) {
3174 if (s->cc_op != CC_OP_DYNAMIC) {
3175 gen_op_set_cc_op(s->cc_op);
3176 s->cc_op = CC_OP_DYNAMIC;
3177 }
3178 gen_goto_tb(s, tb_num, eip);
3179 s->is_jmp = 3;
3180 } else {
3181 gen_jmp_im(eip);
3182 gen_eob(s);
3183 }
3184}
3185
3186static void gen_jmp(DisasContext *s, target_ulong eip)
3187{
3188 gen_jmp_tb(s, eip, 0);
3189}
3190
3191#ifndef VBOX
3192static inline void gen_ldq_env_A0(int idx, int offset)
3193#else /* VBOX */
3194DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3195#endif /* VBOX */
3196{
3197 int mem_index = (idx >> 2) - 1;
3198 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3199 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3200}
3201
3202#ifndef VBOX
3203static inline void gen_stq_env_A0(int idx, int offset)
3204#else /* VBOX */
3205DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3206#endif /* VBOX */
3207{
3208 int mem_index = (idx >> 2) - 1;
3209 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3210 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3211}
3212
3213#ifndef VBOX
3214static inline void gen_ldo_env_A0(int idx, int offset)
3215#else /* VBOX */
3216DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3217#endif /* VBOX */
3218{
3219 int mem_index = (idx >> 2) - 1;
3220 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3221 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3222 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3223 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3224 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3225}
3226
3227#ifndef VBOX
3228static inline void gen_sto_env_A0(int idx, int offset)
3229#else /* VBOX */
3230DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3231#endif /* VBOX */
3232{
3233 int mem_index = (idx >> 2) - 1;
3234 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3235 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3236 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3237 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3238 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3239}
3240
3241#ifndef VBOX
3242static inline void gen_op_movo(int d_offset, int s_offset)
3243#else /* VBOX */
3244DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3245#endif /* VBOX */
3246{
3247 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3248 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3249 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3250 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3251}
3252
3253#ifndef VBOX
3254static inline void gen_op_movq(int d_offset, int s_offset)
3255#else /* VBOX */
3256DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3257#endif /* VBOX */
3258{
3259 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3260 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3261}
3262
3263#ifndef VBOX
3264static inline void gen_op_movl(int d_offset, int s_offset)
3265#else /* VBOX */
3266DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3267#endif /* VBOX */
3268{
3269 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3270 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3271}
3272
3273#ifndef VBOX
3274static inline void gen_op_movq_env_0(int d_offset)
3275#else /* VBOX */
3276DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3277#endif /* VBOX */
3278{
3279 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3280 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3281}
3282
3283#define SSE_SPECIAL ((void *)1)
3284#define SSE_DUMMY ((void *)2)
3285
3286#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3287#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3288 helper_ ## x ## ss, helper_ ## x ## sd, }
3289
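/* Indexed by the opcode byte following 0x0F; within each row the column
   selects the variant by mandatory prefix: 0 = none, 1 = 0x66, 2 = 0xF3,
   3 = 0xF2 (see the computation of b1 in gen_sse). */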
3290static void *sse_op_table1[256][4] = {
3291 /* 3DNow! extensions */
3292 [0x0e] = { SSE_DUMMY }, /* femms */
3293 [0x0f] = { SSE_DUMMY }, /* pf... */
3294 /* pure SSE operations */
3295 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3296 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3297 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3298 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3299 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3300 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3301 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3302 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3303
3304 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3305 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3306 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3307 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3308 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
3309 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
3310 [0x2e] = { helper_ucomiss, helper_ucomisd },
3311 [0x2f] = { helper_comiss, helper_comisd },
3312 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3313 [0x51] = SSE_FOP(sqrt),
3314 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3315 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3316 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3317 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3318 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3319 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3320 [0x58] = SSE_FOP(add),
3321 [0x59] = SSE_FOP(mul),
3322 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3323 helper_cvtss2sd, helper_cvtsd2ss },
3324 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3325 [0x5c] = SSE_FOP(sub),
3326 [0x5d] = SSE_FOP(min),
3327 [0x5e] = SSE_FOP(div),
3328 [0x5f] = SSE_FOP(max),
3329
3330 [0xc2] = SSE_FOP(cmpeq),
3331 [0xc6] = { helper_shufps, helper_shufpd },
3332
3333 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3334 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3335
3336 /* MMX ops and their SSE extensions */
3337 [0x60] = MMX_OP2(punpcklbw),
3338 [0x61] = MMX_OP2(punpcklwd),
3339 [0x62] = MMX_OP2(punpckldq),
3340 [0x63] = MMX_OP2(packsswb),
3341 [0x64] = MMX_OP2(pcmpgtb),
3342 [0x65] = MMX_OP2(pcmpgtw),
3343 [0x66] = MMX_OP2(pcmpgtl),
3344 [0x67] = MMX_OP2(packuswb),
3345 [0x68] = MMX_OP2(punpckhbw),
3346 [0x69] = MMX_OP2(punpckhwd),
3347 [0x6a] = MMX_OP2(punpckhdq),
3348 [0x6b] = MMX_OP2(packssdw),
3349 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3350 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3351 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3352 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3353 [0x70] = { helper_pshufw_mmx,
3354 helper_pshufd_xmm,
3355 helper_pshufhw_xmm,
3356 helper_pshuflw_xmm },
3357 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3358 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3359 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3360 [0x74] = MMX_OP2(pcmpeqb),
3361 [0x75] = MMX_OP2(pcmpeqw),
3362 [0x76] = MMX_OP2(pcmpeql),
3363 [0x77] = { SSE_DUMMY }, /* emms */
3364 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3365 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3366 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3367 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3368 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3369 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3370 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3371 [0xd1] = MMX_OP2(psrlw),
3372 [0xd2] = MMX_OP2(psrld),
3373 [0xd3] = MMX_OP2(psrlq),
3374 [0xd4] = MMX_OP2(paddq),
3375 [0xd5] = MMX_OP2(pmullw),
3376 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3377 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3378 [0xd8] = MMX_OP2(psubusb),
3379 [0xd9] = MMX_OP2(psubusw),
3380 [0xda] = MMX_OP2(pminub),
3381 [0xdb] = MMX_OP2(pand),
3382 [0xdc] = MMX_OP2(paddusb),
3383 [0xdd] = MMX_OP2(paddusw),
3384 [0xde] = MMX_OP2(pmaxub),
3385 [0xdf] = MMX_OP2(pandn),
3386 [0xe0] = MMX_OP2(pavgb),
3387 [0xe1] = MMX_OP2(psraw),
3388 [0xe2] = MMX_OP2(psrad),
3389 [0xe3] = MMX_OP2(pavgw),
3390 [0xe4] = MMX_OP2(pmulhuw),
3391 [0xe5] = MMX_OP2(pmulhw),
3392 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3393 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3394 [0xe8] = MMX_OP2(psubsb),
3395 [0xe9] = MMX_OP2(psubsw),
3396 [0xea] = MMX_OP2(pminsw),
3397 [0xeb] = MMX_OP2(por),
3398 [0xec] = MMX_OP2(paddsb),
3399 [0xed] = MMX_OP2(paddsw),
3400 [0xee] = MMX_OP2(pmaxsw),
3401 [0xef] = MMX_OP2(pxor),
3402 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3403 [0xf1] = MMX_OP2(psllw),
3404 [0xf2] = MMX_OP2(pslld),
3405 [0xf3] = MMX_OP2(psllq),
3406 [0xf4] = MMX_OP2(pmuludq),
3407 [0xf5] = MMX_OP2(pmaddwd),
3408 [0xf6] = MMX_OP2(psadbw),
3409 [0xf7] = MMX_OP2(maskmov),
3410 [0xf8] = MMX_OP2(psubb),
3411 [0xf9] = MMX_OP2(psubw),
3412 [0xfa] = MMX_OP2(psubl),
3413 [0xfb] = MMX_OP2(psubq),
3414 [0xfc] = MMX_OP2(paddb),
3415 [0xfd] = MMX_OP2(paddw),
3416 [0xfe] = MMX_OP2(paddl),
3417};
3418
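/* Immediate-form MMX/SSE shifts (opcodes 0x71..0x73): indexed by
   8 * size group (0 = word, 1 = dword, 2 = qword) plus the reg field of
   the modrm byte; e.g. 66 0F 73 /3 (psrldq) selects entry [16 + 3]. */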
3419static void *sse_op_table2[3 * 8][2] = {
3420 [0 + 2] = MMX_OP2(psrlw),
3421 [0 + 4] = MMX_OP2(psraw),
3422 [0 + 6] = MMX_OP2(psllw),
3423 [8 + 2] = MMX_OP2(psrld),
3424 [8 + 4] = MMX_OP2(psrad),
3425 [8 + 6] = MMX_OP2(pslld),
3426 [16 + 2] = MMX_OP2(psrlq),
3427 [16 + 3] = { NULL, helper_psrldq_xmm },
3428 [16 + 6] = MMX_OP2(psllq),
3429 [16 + 7] = { NULL, helper_pslldq_xmm },
3430};
3431
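/* Scalar integer<->float conversions: index = 2 * (64-bit integer
   operand) + (sd vs ss), with +4 for the truncating cvtt* forms and +8
   for the rounding cvt* forms (see gen_sse). */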
3432static void *sse_op_table3[4 * 3] = {
3433 helper_cvtsi2ss,
3434 helper_cvtsi2sd,
3435 X86_64_ONLY(helper_cvtsq2ss),
3436 X86_64_ONLY(helper_cvtsq2sd),
3437
3438 helper_cvttss2si,
3439 helper_cvttsd2si,
3440 X86_64_ONLY(helper_cvttss2sq),
3441 X86_64_ONLY(helper_cvttsd2sq),
3442
3443 helper_cvtss2si,
3444 helper_cvtsd2si,
3445 X86_64_ONLY(helper_cvtss2sq),
3446 X86_64_ONLY(helper_cvtsd2sq),
3447};
3448
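/* Compare predicates for CMPPS/CMPPD/CMPSS/CMPSD, indexed by the 3-bit
   immediate. */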
3449static void *sse_op_table4[8][4] = {
3450 SSE_FOP(cmpeq),
3451 SSE_FOP(cmplt),
3452 SSE_FOP(cmple),
3453 SSE_FOP(cmpunord),
3454 SSE_FOP(cmpneq),
3455 SSE_FOP(cmpnlt),
3456 SSE_FOP(cmpnle),
3457 SSE_FOP(cmpord),
3458};
3459
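/* 3DNow! operations, indexed by the immediate opcode byte that follows
   the operands in the 0x0F 0x0F encoding. */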
3460static void *sse_op_table5[256] = {
3461 [0x0c] = helper_pi2fw,
3462 [0x0d] = helper_pi2fd,
3463 [0x1c] = helper_pf2iw,
3464 [0x1d] = helper_pf2id,
3465 [0x8a] = helper_pfnacc,
3466 [0x8e] = helper_pfpnacc,
3467 [0x90] = helper_pfcmpge,
3468 [0x94] = helper_pfmin,
3469 [0x96] = helper_pfrcp,
3470 [0x97] = helper_pfrsqrt,
3471 [0x9a] = helper_pfsub,
3472 [0x9e] = helper_pfadd,
3473 [0xa0] = helper_pfcmpgt,
3474 [0xa4] = helper_pfmax,
3475 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3476 [0xa7] = helper_movq, /* pfrsqit1 */
3477 [0xaa] = helper_pfsubr,
3478 [0xae] = helper_pfacc,
3479 [0xb0] = helper_pfcmpeq,
3480 [0xb4] = helper_pfmul,
3481 [0xb6] = helper_movq, /* pfrcpit2 */
3482 [0xb7] = helper_pmulhrw_mmx,
3483 [0xbb] = helper_pswapd,
3484 [0xbf] = helper_pavgb_mmx /* pavgusb */
3485};
3486
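/* op[0] is the MMX variant, op[1] the XMM variant; ext_mask holds the
   CPUID feature bit (leaf 1, ECX) that must be present. */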
3487struct sse_op_helper_s {
3488 void *op[2]; uint32_t ext_mask;
3489};
3490#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3491#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3492#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3493#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3494static struct sse_op_helper_s sse_op_table6[256] = {
3495 [0x00] = SSSE3_OP(pshufb),
3496 [0x01] = SSSE3_OP(phaddw),
3497 [0x02] = SSSE3_OP(phaddd),
3498 [0x03] = SSSE3_OP(phaddsw),
3499 [0x04] = SSSE3_OP(pmaddubsw),
3500 [0x05] = SSSE3_OP(phsubw),
3501 [0x06] = SSSE3_OP(phsubd),
3502 [0x07] = SSSE3_OP(phsubsw),
3503 [0x08] = SSSE3_OP(psignb),
3504 [0x09] = SSSE3_OP(psignw),
3505 [0x0a] = SSSE3_OP(psignd),
3506 [0x0b] = SSSE3_OP(pmulhrsw),
3507 [0x10] = SSE41_OP(pblendvb),
3508 [0x14] = SSE41_OP(blendvps),
3509 [0x15] = SSE41_OP(blendvpd),
3510 [0x17] = SSE41_OP(ptest),
3511 [0x1c] = SSSE3_OP(pabsb),
3512 [0x1d] = SSSE3_OP(pabsw),
3513 [0x1e] = SSSE3_OP(pabsd),
3514 [0x20] = SSE41_OP(pmovsxbw),
3515 [0x21] = SSE41_OP(pmovsxbd),
3516 [0x22] = SSE41_OP(pmovsxbq),
3517 [0x23] = SSE41_OP(pmovsxwd),
3518 [0x24] = SSE41_OP(pmovsxwq),
3519 [0x25] = SSE41_OP(pmovsxdq),
3520 [0x28] = SSE41_OP(pmuldq),
3521 [0x29] = SSE41_OP(pcmpeqq),
3522 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3523 [0x2b] = SSE41_OP(packusdw),
3524 [0x30] = SSE41_OP(pmovzxbw),
3525 [0x31] = SSE41_OP(pmovzxbd),
3526 [0x32] = SSE41_OP(pmovzxbq),
3527 [0x33] = SSE41_OP(pmovzxwd),
3528 [0x34] = SSE41_OP(pmovzxwq),
3529 [0x35] = SSE41_OP(pmovzxdq),
3530 [0x37] = SSE42_OP(pcmpgtq),
3531 [0x38] = SSE41_OP(pminsb),
3532 [0x39] = SSE41_OP(pminsd),
3533 [0x3a] = SSE41_OP(pminuw),
3534 [0x3b] = SSE41_OP(pminud),
3535 [0x3c] = SSE41_OP(pmaxsb),
3536 [0x3d] = SSE41_OP(pmaxsd),
3537 [0x3e] = SSE41_OP(pmaxuw),
3538 [0x3f] = SSE41_OP(pmaxud),
3539 [0x40] = SSE41_OP(pmulld),
3540 [0x41] = SSE41_OP(phminposuw),
3541};
3542
3543static struct sse_op_helper_s sse_op_table7[256] = {
3544 [0x08] = SSE41_OP(roundps),
3545 [0x09] = SSE41_OP(roundpd),
3546 [0x0a] = SSE41_OP(roundss),
3547 [0x0b] = SSE41_OP(roundsd),
3548 [0x0c] = SSE41_OP(blendps),
3549 [0x0d] = SSE41_OP(blendpd),
3550 [0x0e] = SSE41_OP(pblendw),
3551 [0x0f] = SSSE3_OP(palignr),
3552 [0x14] = SSE41_SPECIAL, /* pextrb */
3553 [0x15] = SSE41_SPECIAL, /* pextrw */
3554 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3555 [0x17] = SSE41_SPECIAL, /* extractps */
3556 [0x20] = SSE41_SPECIAL, /* pinsrb */
3557 [0x21] = SSE41_SPECIAL, /* insertps */
3558 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3559 [0x40] = SSE41_OP(dpps),
3560 [0x41] = SSE41_OP(dppd),
3561 [0x42] = SSE41_OP(mpsadbw),
3562 [0x60] = SSE42_OP(pcmpestrm),
3563 [0x61] = SSE42_OP(pcmpestri),
3564 [0x62] = SSE42_OP(pcmpistrm),
3565 [0x63] = SSE42_OP(pcmpistri),
3566};
3567
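/* Translate one MMX/SSE instruction. 'b' is the opcode byte following
   0x0F (special cases get b1 << 8 merged in below); rex_r supplies the
   REX.R extension of the modrm reg field. */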
3568static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3569{
3570 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3571 int modrm, mod, rm, reg, reg_addr, offset_addr;
3572 void *sse_op2;
3573
3574 b &= 0xff;
3575 if (s->prefix & PREFIX_DATA)
3576 b1 = 1;
3577 else if (s->prefix & PREFIX_REPZ)
3578 b1 = 2;
3579 else if (s->prefix & PREFIX_REPNZ)
3580 b1 = 3;
3581 else
3582 b1 = 0;
3583 sse_op2 = sse_op_table1[b][b1];
3584 if (!sse_op2)
3585 goto illegal_op;
3586 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3587 is_xmm = 1;
3588 } else {
3589 if (b1 == 0) {
3590 /* MMX case */
3591 is_xmm = 0;
3592 } else {
3593 is_xmm = 1;
3594 }
3595 }
3596 /* simple MMX/SSE operation */
3597 if (s->flags & HF_TS_MASK) {
3598 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3599 return;
3600 }
3601 if (s->flags & HF_EM_MASK) {
3602 illegal_op:
3603 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3604 return;
3605 }
3606 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3607 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3608 goto illegal_op;
3609 if (b == 0x0e) {
3610 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3611 goto illegal_op;
3612 /* femms */
3613 tcg_gen_helper_0_0(helper_emms);
3614 return;
3615 }
3616 if (b == 0x77) {
3617 /* emms */
3618 tcg_gen_helper_0_0(helper_emms);
3619 return;
3620 }
3621 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3622 the static cpu state) */
3623 if (!is_xmm) {
3624 tcg_gen_helper_0_0(helper_enter_mmx);
3625 }
3626
3627 modrm = ldub_code(s->pc++);
3628 reg = ((modrm >> 3) & 7);
3629 if (is_xmm)
3630 reg |= rex_r;
3631 mod = (modrm >> 6) & 3;
3632 if (sse_op2 == SSE_SPECIAL) {
3633 b |= (b1 << 8);
3634 switch(b) {
3635 case 0x0e7: /* movntq */
3636 if (mod == 3)
3637 goto illegal_op;
3638 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3639 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3640 break;
3641 case 0x1e7: /* movntdq */
3642 case 0x02b: /* movntps */
3643 case 0x12b: /* movntpd */
3644 case 0x3f0: /* lddqu */
3645 if (mod == 3)
3646 goto illegal_op;
3647 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3648 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3649 break;
3650 case 0x6e: /* movd mm, ea */
3651#ifdef TARGET_X86_64
3652 if (s->dflag == 2) {
3653 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3654 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3655 } else
3656#endif
3657 {
3658 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3659 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3660 offsetof(CPUX86State,fpregs[reg].mmx));
3661 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3662 }
3663 break;
3664 case 0x16e: /* movd xmm, ea */
3665#ifdef TARGET_X86_64
3666 if (s->dflag == 2) {
3667 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3668 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3669 offsetof(CPUX86State,xmm_regs[reg]));
3670 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3671 } else
3672#endif
3673 {
3674 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3675 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3676 offsetof(CPUX86State,xmm_regs[reg]));
3677 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3678 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3679 }
3680 break;
3681 case 0x6f: /* movq mm, ea */
3682 if (mod != 3) {
3683 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3684 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3685 } else {
3686 rm = (modrm & 7);
3687 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3688 offsetof(CPUX86State,fpregs[rm].mmx));
3689 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3690 offsetof(CPUX86State,fpregs[reg].mmx));
3691 }
3692 break;
3693 case 0x010: /* movups */
3694 case 0x110: /* movupd */
3695 case 0x028: /* movaps */
3696 case 0x128: /* movapd */
3697 case 0x16f: /* movdqa xmm, ea */
3698 case 0x26f: /* movdqu xmm, ea */
3699 if (mod != 3) {
3700 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3701 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3702 } else {
3703 rm = (modrm & 7) | REX_B(s);
3704 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3705 offsetof(CPUX86State,xmm_regs[rm]));
3706 }
3707 break;
3708 case 0x210: /* movss xmm, ea */
3709 if (mod != 3) {
3710 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3711 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3712 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3713 gen_op_movl_T0_0();
3714 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3715 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3716 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3717 } else {
3718 rm = (modrm & 7) | REX_B(s);
3719 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3720 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3721 }
3722 break;
3723 case 0x310: /* movsd xmm, ea */
3724 if (mod != 3) {
3725 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3726 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3727 gen_op_movl_T0_0();
3728 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3729 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3730 } else {
3731 rm = (modrm & 7) | REX_B(s);
3732 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3733 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3734 }
3735 break;
3736 case 0x012: /* movlps */
3737 case 0x112: /* movlpd */
3738 if (mod != 3) {
3739 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3740 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3741 } else {
3742 /* movhlps */
3743 rm = (modrm & 7) | REX_B(s);
3744 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3745 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3746 }
3747 break;
3748 case 0x212: /* movsldup */
3749 if (mod != 3) {
3750 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3751 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3752 } else {
3753 rm = (modrm & 7) | REX_B(s);
3754 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3755 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3756 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3757 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3758 }
3759 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3760 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3761 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3762 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3763 break;
3764 case 0x312: /* movddup */
3765 if (mod != 3) {
3766 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3767 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3768 } else {
3769 rm = (modrm & 7) | REX_B(s);
3770 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3771 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3772 }
3773 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3774 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3775 break;
3776 case 0x016: /* movhps */
3777 case 0x116: /* movhpd */
3778 if (mod != 3) {
3779 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3780 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3781 } else {
3782 /* movlhps */
3783 rm = (modrm & 7) | REX_B(s);
3784 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3785 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3786 }
3787 break;
3788 case 0x216: /* movshdup */
3789 if (mod != 3) {
3790 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3791 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3792 } else {
3793 rm = (modrm & 7) | REX_B(s);
3794 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3795 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3796 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3797 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3798 }
3799 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3800 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3801 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3802 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3803 break;
3804 case 0x7e: /* movd ea, mm */
3805#ifdef TARGET_X86_64
3806 if (s->dflag == 2) {
3807 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3808 offsetof(CPUX86State,fpregs[reg].mmx));
3809 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3810 } else
3811#endif
3812 {
3813 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3814 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3815 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3816 }
3817 break;
3818 case 0x17e: /* movd ea, xmm */
3819#ifdef TARGET_X86_64
3820 if (s->dflag == 2) {
3821 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3822 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3823 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3824 } else
3825#endif
3826 {
3827 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3828 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3829 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3830 }
3831 break;
3832 case 0x27e: /* movq xmm, ea */
3833 if (mod != 3) {
3834 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3835 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3836 } else {
3837 rm = (modrm & 7) | REX_B(s);
3838 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3839 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3840 }
3841 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3842 break;
3843 case 0x7f: /* movq ea, mm */
3844 if (mod != 3) {
3845 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3846 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3847 } else {
3848 rm = (modrm & 7);
3849 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3850 offsetof(CPUX86State,fpregs[reg].mmx));
3851 }
3852 break;
3853 case 0x011: /* movups */
3854 case 0x111: /* movupd */
3855 case 0x029: /* movaps */
3856 case 0x129: /* movapd */
3857 case 0x17f: /* movdqa ea, xmm */
3858 case 0x27f: /* movdqu ea, xmm */
3859 if (mod != 3) {
3860 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3861 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3862 } else {
3863 rm = (modrm & 7) | REX_B(s);
3864 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3865 offsetof(CPUX86State,xmm_regs[reg]));
3866 }
3867 break;
3868 case 0x211: /* movss ea, xmm */
3869 if (mod != 3) {
3870 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3871 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3872 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3873 } else {
3874 rm = (modrm & 7) | REX_B(s);
3875 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3876 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3877 }
3878 break;
3879 case 0x311: /* movsd ea, xmm */
3880 if (mod != 3) {
3881 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3882 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3883 } else {
3884 rm = (modrm & 7) | REX_B(s);
3885 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3886 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3887 }
3888 break;
3889 case 0x013: /* movlps */
3890 case 0x113: /* movlpd */
3891 if (mod != 3) {
3892 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3893 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3894 } else {
3895 goto illegal_op;
3896 }
3897 break;
3898 case 0x017: /* movhps */
3899 case 0x117: /* movhpd */
3900 if (mod != 3) {
3901 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3902 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3903 } else {
3904 goto illegal_op;
3905 }
3906 break;
3907 case 0x71: /* shift mm, im */
3908 case 0x72:
3909 case 0x73:
3910 case 0x171: /* shift xmm, im */
3911 case 0x172:
3912 case 0x173:
3913 val = ldub_code(s->pc++);
3914 if (is_xmm) {
3915 gen_op_movl_T0_im(val);
3916 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3917 gen_op_movl_T0_0();
3918 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3919 op1_offset = offsetof(CPUX86State,xmm_t0);
3920 } else {
3921 gen_op_movl_T0_im(val);
3922 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3923 gen_op_movl_T0_0();
3924 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3925 op1_offset = offsetof(CPUX86State,mmx_t0);
3926 }
3927 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3928 if (!sse_op2)
3929 goto illegal_op;
3930 if (is_xmm) {
3931 rm = (modrm & 7) | REX_B(s);
3932 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3933 } else {
3934 rm = (modrm & 7);
3935 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3936 }
3937 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3938 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3939 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3940 break;
3941 case 0x050: /* movmskps */
3942 rm = (modrm & 7) | REX_B(s);
3943 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3944 offsetof(CPUX86State,xmm_regs[rm]));
3945 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3946 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3947 gen_op_mov_reg_T0(OT_LONG, reg);
3948 break;
3949 case 0x150: /* movmskpd */
3950 rm = (modrm & 7) | REX_B(s);
3951 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3952 offsetof(CPUX86State,xmm_regs[rm]));
3953 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3954 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3955 gen_op_mov_reg_T0(OT_LONG, reg);
3956 break;
3957 case 0x02a: /* cvtpi2ps */
3958 case 0x12a: /* cvtpi2pd */
3959 tcg_gen_helper_0_0(helper_enter_mmx);
3960 if (mod != 3) {
3961 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3962 op2_offset = offsetof(CPUX86State,mmx_t0);
3963 gen_ldq_env_A0(s->mem_index, op2_offset);
3964 } else {
3965 rm = (modrm & 7);
3966 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3967 }
3968 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3969 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3970 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3971 switch(b >> 8) {
3972 case 0x0:
3973 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3974 break;
3975 default:
3976 case 0x1:
3977 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3978 break;
3979 }
3980 break;
3981 case 0x22a: /* cvtsi2ss */
3982 case 0x32a: /* cvtsi2sd */
3983 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3984 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3985 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3986 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3987 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3988 if (ot == OT_LONG) {
3989 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3990 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3991 } else {
3992 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3993 }
3994 break;
3995 case 0x02c: /* cvttps2pi */
3996 case 0x12c: /* cvttpd2pi */
3997 case 0x02d: /* cvtps2pi */
3998 case 0x12d: /* cvtpd2pi */
3999 tcg_gen_helper_0_0(helper_enter_mmx);
4000 if (mod != 3) {
4001 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4002 op2_offset = offsetof(CPUX86State,xmm_t0);
4003 gen_ldo_env_A0(s->mem_index, op2_offset);
4004 } else {
4005 rm = (modrm & 7) | REX_B(s);
4006 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4007 }
4008 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
4009 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4010 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4011 switch(b) {
4012 case 0x02c:
4013 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
4014 break;
4015 case 0x12c:
4016 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
4017 break;
4018 case 0x02d:
4019 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
4020 break;
4021 case 0x12d:
4022 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4023 break;
4024 }
4025 break;
4026 case 0x22c: /* cvttss2si */
4027 case 0x32c: /* cvttsd2si */
4028 case 0x22d: /* cvtss2si */
4029 case 0x32d: /* cvtsd2si */
4030 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4031 if (mod != 3) {
4032 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4033 if ((b >> 8) & 1) {
4034 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4035 } else {
4036 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4037 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4038 }
4039 op2_offset = offsetof(CPUX86State,xmm_t0);
4040 } else {
4041 rm = (modrm & 7) | REX_B(s);
4042 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4043 }
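            /* Index into sse_op_table3: judging by the arithmetic, the first
               four entries are the cvtsi2ss/sd helpers used above, entries
               4..7 the truncating cvtt*2si forms (opcode 0x2c, b & 1 == 0)
               and entries 8..11 the rounding cvt*2si forms (opcode 0x2d),
               with REX.W (dflag == 2) selecting the 64-bit variant. */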
4044 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4045 (b & 1) * 4];
4046 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4047 if (ot == OT_LONG) {
4048 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4049 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4050 } else {
4051 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4052 }
4053 gen_op_mov_reg_T0(ot, reg);
4054 break;
4055 case 0xc4: /* pinsrw */
4056 case 0x1c4:
4057 s->rip_offset = 1;
4058 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4059 val = ldub_code(s->pc++);
4060 if (b1) {
4061 val &= 7;
4062 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4063 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4064 } else {
4065 val &= 3;
4066 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4067 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4068 }
4069 break;
4070 case 0xc5: /* pextrw */
4071 case 0x1c5:
4072 if (mod != 3)
4073 goto illegal_op;
4074 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4075 val = ldub_code(s->pc++);
4076 if (b1) {
4077 val &= 7;
4078 rm = (modrm & 7) | REX_B(s);
4079 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4080 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4081 } else {
4082 val &= 3;
4083 rm = (modrm & 7);
4084 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4085 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4086 }
4087 reg = ((modrm >> 3) & 7) | rex_r;
4088 gen_op_mov_reg_T0(ot, reg);
4089 break;
4090 case 0x1d6: /* movq ea, xmm */
4091 if (mod != 3) {
4092 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4093 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4094 } else {
4095 rm = (modrm & 7) | REX_B(s);
4096 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4097 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4098 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4099 }
4100 break;
4101 case 0x2d6: /* movq2dq */
4102 tcg_gen_helper_0_0(helper_enter_mmx);
4103 rm = (modrm & 7);
4104 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4105 offsetof(CPUX86State,fpregs[rm].mmx));
4106 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4107 break;
4108 case 0x3d6: /* movdq2q */
4109 tcg_gen_helper_0_0(helper_enter_mmx);
4110 rm = (modrm & 7) | REX_B(s);
4111 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4112 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4113 break;
4114 case 0xd7: /* pmovmskb */
4115 case 0x1d7:
4116 if (mod != 3)
4117 goto illegal_op;
4118 if (b1) {
4119 rm = (modrm & 7) | REX_B(s);
4120 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4121 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4122 } else {
4123 rm = (modrm & 7);
4124 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4125 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4126 }
4127 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4128 reg = ((modrm >> 3) & 7) | rex_r;
4129 gen_op_mov_reg_T0(OT_LONG, reg);
4130 break;
4131 case 0x138:
4132 if (s->prefix & PREFIX_REPNZ)
4133 goto crc32;
4134 case 0x038:
4135 b = modrm;
4136 modrm = ldub_code(s->pc++);
4137 rm = modrm & 7;
4138 reg = ((modrm >> 3) & 7) | rex_r;
4139 mod = (modrm >> 6) & 3;
4140
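        /* Three-byte opcode 0f 38 xx: b now holds the third opcode byte.
           sse_op_table6 maps it to a helper, with b1 (non-zero for the
           66-prefixed form) selecting the xmm version over the mmx one. */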
4141 sse_op2 = sse_op_table6[b].op[b1];
4142 if (!sse_op2)
4143 goto illegal_op;
4144 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4145 goto illegal_op;
4146
4147 if (b1) {
4148 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4149 if (mod == 3) {
4150 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4151 } else {
4152 op2_offset = offsetof(CPUX86State,xmm_t0);
4153 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4154 switch (b) {
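                    /* The pmovsx/pmovzx memory forms architecturally access
                       only the low 8/4/2 bytes of the source, so load just
                       that much instead of a full 16-byte vector. */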
4155 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4156 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4157 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4158 gen_ldq_env_A0(s->mem_index, op2_offset +
4159 offsetof(XMMReg, XMM_Q(0)));
4160 break;
4161 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4162 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4163 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
4164 (s->mem_index >> 2) - 1);
4165 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4166 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4167 offsetof(XMMReg, XMM_L(0)));
4168 break;
4169 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4170 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4171 (s->mem_index >> 2) - 1);
4172 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4173 offsetof(XMMReg, XMM_W(0)));
4174 break;
4175                     case 0x2a: /* movntdqa */
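                        /* Non-temporal load: it only writes the destination
                           register, so load straight into op1 and skip the
                           generic helper dispatch below. */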
4176 gen_ldo_env_A0(s->mem_index, op1_offset);
4177 return;
4178 default:
4179 gen_ldo_env_A0(s->mem_index, op2_offset);
4180 }
4181 }
4182 } else {
4183 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4184 if (mod == 3) {
4185 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4186 } else {
4187 op2_offset = offsetof(CPUX86State,mmx_t0);
4188 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4189 gen_ldq_env_A0(s->mem_index, op2_offset);
4190 }
4191 }
4192 if (sse_op2 == SSE_SPECIAL)
4193 goto illegal_op;
4194
4195 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4196 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4197 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4198
4199 if (b == 0x17)
4200 s->cc_op = CC_OP_EFLAGS;
4201 break;
4202 case 0x338: /* crc32 */
4203 crc32:
4204 b = modrm;
4205 modrm = ldub_code(s->pc++);
4206 reg = ((modrm >> 3) & 7) | rex_r;
4207
4208 if (b != 0xf0 && b != 0xf1)
4209 goto illegal_op;
4210 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4211 goto illegal_op;
4212
4213 if (b == 0xf0)
4214 ot = OT_BYTE;
4215 else if (b == 0xf1 && s->dflag != 2)
4216 if (s->prefix & PREFIX_DATA)
4217 ot = OT_WORD;
4218 else
4219 ot = OT_LONG;
4220 else
4221 ot = OT_QUAD;
4222
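            /* Accumulate into the 32-bit CRC held in 'reg': the helper takes
               the current CRC, the source operand, and its width in bits
               (8 << ot). */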
4223 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4224 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4225 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4226 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4227 cpu_T[0], tcg_const_i32(8 << ot));
4228
4229 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4230 gen_op_mov_reg_T0(ot, reg);
4231 break;
4232 case 0x03a:
4233 case 0x13a:
4234 b = modrm;
4235 modrm = ldub_code(s->pc++);
4236 rm = modrm & 7;
4237 reg = ((modrm >> 3) & 7) | rex_r;
4238 mod = (modrm >> 6) & 3;
4239
4240 sse_op2 = sse_op_table7[b].op[b1];
4241 if (!sse_op2)
4242 goto illegal_op;
4243 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4244 goto illegal_op;
4245
4246 if (sse_op2 == SSE_SPECIAL) {
4247 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4248 rm = (modrm & 7) | REX_B(s);
4249 if (mod != 3)
4250 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4251 reg = ((modrm >> 3) & 7) | rex_r;
4252 val = ldub_code(s->pc++);
4253 switch (b) {
4254 case 0x14: /* pextrb */
4255 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4256 xmm_regs[reg].XMM_B(val & 15)));
4257 if (mod == 3)
4258 gen_op_mov_reg_T0(ot, rm);
4259 else
4260 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4261 (s->mem_index >> 2) - 1);
4262 break;
4263 case 0x15: /* pextrw */
4264 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4265 xmm_regs[reg].XMM_W(val & 7)));
4266 if (mod == 3)
4267 gen_op_mov_reg_T0(ot, rm);
4268 else
4269 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4270 (s->mem_index >> 2) - 1);
4271 break;
4272 case 0x16:
4273 if (ot == OT_LONG) { /* pextrd */
4274 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4275 offsetof(CPUX86State,
4276 xmm_regs[reg].XMM_L(val & 3)));
4277 if (mod == 3)
4278 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4279 else
4280 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4281 (s->mem_index >> 2) - 1);
4282 } else { /* pextrq */
4283 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4284 offsetof(CPUX86State,
4285 xmm_regs[reg].XMM_Q(val & 1)));
4286 if (mod == 3)
4287 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4288 else
4289 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4290 (s->mem_index >> 2) - 1);
4291 }
4292 break;
4293 case 0x17: /* extractps */
4294 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4295 xmm_regs[reg].XMM_L(val & 3)));
4296 if (mod == 3)
4297 gen_op_mov_reg_T0(ot, rm);
4298 else
4299 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4300 (s->mem_index >> 2) - 1);
4301 break;
4302 case 0x20: /* pinsrb */
4303 if (mod == 3)
4304 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4305 else
4306 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4307 (s->mem_index >> 2) - 1);
4308 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4309 xmm_regs[reg].XMM_B(val & 15)));
4310 break;
4311 case 0x21: /* insertps */
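                /* insertps imm8: bits 7:6 select the source dword (register
                   form), bits 5:4 the destination slot, and bits 3:0 form a
                   zero mask applied afterwards. */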
4312 if (mod == 3)
4313 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4314 offsetof(CPUX86State,xmm_regs[rm]
4315 .XMM_L((val >> 6) & 3)));
4316 else
4317 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4318 (s->mem_index >> 2) - 1);
4319 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4320 offsetof(CPUX86State,xmm_regs[reg]
4321 .XMM_L((val >> 4) & 3)));
4322 if ((val >> 0) & 1)
4323 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4324 cpu_env, offsetof(CPUX86State,
4325 xmm_regs[reg].XMM_L(0)));
4326 if ((val >> 1) & 1)
4327 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4328 cpu_env, offsetof(CPUX86State,
4329 xmm_regs[reg].XMM_L(1)));
4330 if ((val >> 2) & 1)
4331 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4332 cpu_env, offsetof(CPUX86State,
4333 xmm_regs[reg].XMM_L(2)));
4334 if ((val >> 3) & 1)
4335 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4336 cpu_env, offsetof(CPUX86State,
4337 xmm_regs[reg].XMM_L(3)));
4338 break;
4339 case 0x22:
4340 if (ot == OT_LONG) { /* pinsrd */
4341 if (mod == 3)
4342 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4343 else
4344 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4345 (s->mem_index >> 2) - 1);
4346 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4347 offsetof(CPUX86State,
4348 xmm_regs[reg].XMM_L(val & 3)));
4349 } else { /* pinsrq */
4350 if (mod == 3)
4351 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4352 else
4353 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4354 (s->mem_index >> 2) - 1);
4355 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4356 offsetof(CPUX86State,
4357 xmm_regs[reg].XMM_Q(val & 1)));
4358 }
4359 break;
4360 }
4361 return;
4362 }
4363
4364 if (b1) {
4365 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4366 if (mod == 3) {
4367 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4368 } else {
4369 op2_offset = offsetof(CPUX86State,xmm_t0);
4370 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4371 gen_ldo_env_A0(s->mem_index, op2_offset);
4372 }
4373 } else {
4374 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4375 if (mod == 3) {
4376 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4377 } else {
4378 op2_offset = offsetof(CPUX86State,mmx_t0);
4379 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4380 gen_ldq_env_A0(s->mem_index, op2_offset);
4381 }
4382 }
4383 val = ldub_code(s->pc++);
4384
4385 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4386 s->cc_op = CC_OP_EFLAGS;
4387
4388 if (s->dflag == 2)
4389 /* The helper must use entire 64-bit gp registers */
4390 val |= 1 << 8;
4391 }
4392
4393 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4394 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4395 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4396 break;
4397 default:
4398 goto illegal_op;
4399 }
4400 } else {
4401 /* generic MMX or SSE operation */
4402 switch(b) {
4403 case 0x70: /* pshufx insn */
4404 case 0xc6: /* pshufx insn */
4405 case 0xc2: /* compare insns */
4406 s->rip_offset = 1;
4407 break;
4408 default:
4409 break;
4410 }
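            /* rip_offset is the number of immediate bytes still to come
               after the modrm/sib/displacement, so that 64-bit RIP-relative
               addresses are computed from the end of the instruction. */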
4411 if (is_xmm) {
4412 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4413 if (mod != 3) {
4414 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4415 op2_offset = offsetof(CPUX86State,xmm_t0);
4416 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4417 b == 0xc2)) {
4418 /* specific case for SSE single instructions */
4419 if (b1 == 2) {
4420 /* 32 bit access */
4421 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4422 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4423 } else {
4424 /* 64 bit access */
4425 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4426 }
4427 } else {
4428 gen_ldo_env_A0(s->mem_index, op2_offset);
4429 }
4430 } else {
4431 rm = (modrm & 7) | REX_B(s);
4432 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4433 }
4434 } else {
4435 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4436 if (mod != 3) {
4437 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4438 op2_offset = offsetof(CPUX86State,mmx_t0);
4439 gen_ldq_env_A0(s->mem_index, op2_offset);
4440 } else {
4441 rm = (modrm & 7);
4442 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4443 }
4444 }
4445 switch(b) {
4446 case 0x0f: /* 3DNow! data insns */
4447 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4448 goto illegal_op;
4449 val = ldub_code(s->pc++);
4450 sse_op2 = sse_op_table5[val];
4451 if (!sse_op2)
4452 goto illegal_op;
4453 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4454 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4455 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4456 break;
4457 case 0x70: /* pshufx insn */
4458 case 0xc6: /* pshufx insn */
4459 val = ldub_code(s->pc++);
4460 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4461 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4462 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4463 break;
4464 case 0xc2:
4465 /* compare insns */
4466 val = ldub_code(s->pc++);
4467 if (val >= 8)
4468 goto illegal_op;
4469 sse_op2 = sse_op_table4[val][b1];
4470 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4471 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4472 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4473 break;
4474 case 0xf7:
4475             /* maskmov: we must prepare A0 */
4476 if (mod != 3)
4477 goto illegal_op;
4478#ifdef TARGET_X86_64
4479 if (s->aflag == 2) {
4480 gen_op_movq_A0_reg(R_EDI);
4481 } else
4482#endif
4483 {
4484 gen_op_movl_A0_reg(R_EDI);
4485 if (s->aflag == 0)
4486 gen_op_andl_A0_ffff();
4487 }
4488 gen_add_A0_ds_seg(s);
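            /* maskmovq/maskmovdqu store through an implicit DS:rDI operand
               (segment overridable), hence the explicit A0 setup above. */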
4489
4490 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4491 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4492 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4493 break;
4494 default:
4495 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4496 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4497 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4498 break;
4499 }
4500 if (b == 0x2e || b == 0x2f) {
4501 s->cc_op = CC_OP_EFLAGS;
4502 }
4503 }
4504}
4505
4506#ifdef VBOX
4507/* Checks for an invalid lock prefix sequence. Only a few instructions
4508   can be used together with the lock prefix, and of those only the
4509   forms that write to a memory operand are valid. So this is a rather
4510   tedious check to make...
4511   The AMD manual lists the following instructions:
4512 ADC
4513 ADD
4514 AND
4515 BTC
4516 BTR
4517 BTS
4518 CMPXCHG
4519 CMPXCHG8B
4520 CMPXCHG16B
4521 DEC
4522 INC
4523 NEG
4524 NOT
4525 OR
4526 SBB
4527 SUB
4528 XADD
4529 XCHG
4530 XOR */
4531static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4532{
4533 target_ulong pc = s->pc;
4534 int modrm, mod, op;
4535
4536 /* X={8,16,32,64} Y={16,32,64} */
4537 switch (b)
4538 {
4539 /* /2: ADC reg/memX, immX */
4540 /* /0: ADD reg/memX, immX */
4541 /* /4: AND reg/memX, immX */
4542 /* /1: OR reg/memX, immX */
4543 /* /3: SBB reg/memX, immX */
4544 /* /5: SUB reg/memX, immX */
4545 /* /6: XOR reg/memX, immX */
4546 case 0x80:
4547 case 0x81:
4548 case 0x83:
4549 modrm = ldub_code(pc++);
4550 op = (modrm >> 3) & 7;
4551 if (op == 7) /* /7: CMP */
4552 break;
4553 mod = (modrm >> 6) & 3;
4554 if (mod == 3) /* register destination */
4555 break;
4556 return false;
4557
4558 case 0x10: /* /r: ADC reg/mem8, reg8 */
4559 case 0x11: /* /r: ADC reg/memX, regY */
4560 case 0x00: /* /r: ADD reg/mem8, reg8 */
4561 case 0x01: /* /r: ADD reg/memX, regY */
4562 case 0x20: /* /r: AND reg/mem8, reg8 */
4563 case 0x21: /* /r: AND reg/memY, regY */
4564 case 0x08: /* /r: OR reg/mem8, reg8 */
4565 case 0x09: /* /r: OR reg/memY, regY */
4566 case 0x18: /* /r: SBB reg/mem8, reg8 */
4567 case 0x19: /* /r: SBB reg/memY, regY */
4568 case 0x28: /* /r: SUB reg/mem8, reg8 */
4569 case 0x29: /* /r: SUB reg/memY, regY */
4570 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4571 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4572 case 0x30: /* /r: XOR reg/mem8, reg8 */
4573 case 0x31: /* /r: XOR reg/memY, regY */
4574 modrm = ldub_code(pc++);
4575 mod = (modrm >> 6) & 3;
4576 if (mod == 3) /* register destination */
4577 break;
4578 return false;
4579
4580 /* /1: DEC reg/memX */
4581 /* /0: INC reg/memX */
4582 case 0xfe:
4583 case 0xff:
4584 modrm = ldub_code(pc++);
4585 mod = (modrm >> 6) & 3;
4586 if (mod == 3) /* register destination */
4587 break;
4588 return false;
4589
4590 /* /3: NEG reg/memX */
4591 /* /2: NOT reg/memX */
4592 case 0xf6:
4593 case 0xf7:
4594 modrm = ldub_code(pc++);
4595 mod = (modrm >> 6) & 3;
4596 if (mod == 3) /* register destination */
4597 break;
4598 return false;
4599
4600 case 0x0f:
4601 b = ldub_code(pc++);
4602 switch (b)
4603 {
4604 /* /7: BTC reg/memY, imm8 */
4605 /* /6: BTR reg/memY, imm8 */
4606 /* /5: BTS reg/memY, imm8 */
4607 case 0xba:
4608 modrm = ldub_code(pc++);
4609 op = (modrm >> 3) & 7;
4610 if (op < 5)
4611 break;
4612 mod = (modrm >> 6) & 3;
4613 if (mod == 3) /* register destination */
4614 break;
4615 return false;
4616
4617 case 0xbb: /* /r: BTC reg/memY, regY */
4618 case 0xb3: /* /r: BTR reg/memY, regY */
4619 case 0xab: /* /r: BTS reg/memY, regY */
4620 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4621 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4622 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4623 case 0xc1: /* /r: XADD reg/memY, regY */
4624 modrm = ldub_code(pc++);
4625 mod = (modrm >> 6) & 3;
4626 if (mod == 3) /* register destination */
4627 break;
4628 return false;
4629
4630 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4631 case 0xc7:
4632 modrm = ldub_code(pc++);
4633 op = (modrm >> 3) & 7;
4634 if (op != 1)
4635 break;
4636 return false;
4637 }
4638 break;
4639 }
4640
4641    /* Illegal sequence. s->pc is past the lock prefix, and that is
4642       sufficient for the TB, I think. */
4643 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4644 return true;
4645}
4646#endif /* VBOX */
4647
4648
4649/* convert one instruction. s->is_jmp is set if the translation must
4650 be stopped. Return the next pc value */
4651static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4652{
4653 int b, prefixes, aflag, dflag;
4654 int shift, ot;
4655 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4656 target_ulong next_eip, tval;
4657 int rex_w, rex_r;
4658
4659 if (unlikely(loglevel & CPU_LOG_TB_OP))
4660 tcg_gen_debug_insn_start(pc_start);
4661
4662 s->pc = pc_start;
4663 prefixes = 0;
4664 aflag = s->code32;
4665 dflag = s->code32;
4666 s->override = -1;
4667 rex_w = -1;
4668 rex_r = 0;
4669#ifdef TARGET_X86_64
4670 s->rex_x = 0;
4671 s->rex_b = 0;
4672 x86_64_hregs = 0;
4673#endif
4674 s->rip_offset = 0; /* for relative ip address */
4675#ifdef VBOX
4676    /* nike: seems to only slow things down */
4677# if 0
4678 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4679
4680 gen_update_eip(pc_start - s->cs_base);
4681# endif
4682#endif
4683
4684 next_byte:
4685 b = ldub_code(s->pc);
4686 s->pc++;
4687 /* check prefixes */
4688#ifdef TARGET_X86_64
4689 if (CODE64(s)) {
4690 switch (b) {
4691 case 0xf3:
4692 prefixes |= PREFIX_REPZ;
4693 goto next_byte;
4694 case 0xf2:
4695 prefixes |= PREFIX_REPNZ;
4696 goto next_byte;
4697 case 0xf0:
4698 prefixes |= PREFIX_LOCK;
4699 goto next_byte;
4700 case 0x2e:
4701 s->override = R_CS;
4702 goto next_byte;
4703 case 0x36:
4704 s->override = R_SS;
4705 goto next_byte;
4706 case 0x3e:
4707 s->override = R_DS;
4708 goto next_byte;
4709 case 0x26:
4710 s->override = R_ES;
4711 goto next_byte;
4712 case 0x64:
4713 s->override = R_FS;
4714 goto next_byte;
4715 case 0x65:
4716 s->override = R_GS;
4717 goto next_byte;
4718 case 0x66:
4719 prefixes |= PREFIX_DATA;
4720 goto next_byte;
4721 case 0x67:
4722 prefixes |= PREFIX_ADR;
4723 goto next_byte;
4724 case 0x40 ... 0x4f:
4725 /* REX prefix */
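            /* The REX R/X/B bits are pre-shifted to bit 3 so they can be
               OR'ed directly into the 4-bit modrm reg, SIB index, and
               base/rm register numbers. */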
4726 rex_w = (b >> 3) & 1;
4727 rex_r = (b & 0x4) << 1;
4728 s->rex_x = (b & 0x2) << 2;
4729 REX_B(s) = (b & 0x1) << 3;
4730 x86_64_hregs = 1; /* select uniform byte register addressing */
4731 goto next_byte;
4732 }
4733 if (rex_w == 1) {
4734 /* 0x66 is ignored if rex.w is set */
4735 dflag = 2;
4736 } else {
4737 if (prefixes & PREFIX_DATA)
4738 dflag ^= 1;
4739 }
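        /* In long mode the default operand size is 32 bits (0x66 toggles it
           to 16 unless REX.W forces 64), while the default address size is
           64 bits and 0x67 selects 32. */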
4740 if (!(prefixes & PREFIX_ADR))
4741 aflag = 2;
4742 } else
4743#endif
4744 {
4745 switch (b) {
4746 case 0xf3:
4747 prefixes |= PREFIX_REPZ;
4748 goto next_byte;
4749 case 0xf2:
4750 prefixes |= PREFIX_REPNZ;
4751 goto next_byte;
4752 case 0xf0:
4753 prefixes |= PREFIX_LOCK;
4754 goto next_byte;
4755 case 0x2e:
4756 s->override = R_CS;
4757 goto next_byte;
4758 case 0x36:
4759 s->override = R_SS;
4760 goto next_byte;
4761 case 0x3e:
4762 s->override = R_DS;
4763 goto next_byte;
4764 case 0x26:
4765 s->override = R_ES;
4766 goto next_byte;
4767 case 0x64:
4768 s->override = R_FS;
4769 goto next_byte;
4770 case 0x65:
4771 s->override = R_GS;
4772 goto next_byte;
4773 case 0x66:
4774 prefixes |= PREFIX_DATA;
4775 goto next_byte;
4776 case 0x67:
4777 prefixes |= PREFIX_ADR;
4778 goto next_byte;
4779 }
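        /* Outside long mode, 0x66/0x67 simply flip between the 16- and
           32-bit defaults implied by the code segment's D bit (s->code32). */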
4780 if (prefixes & PREFIX_DATA)
4781 dflag ^= 1;
4782 if (prefixes & PREFIX_ADR)
4783 aflag ^= 1;
4784 }
4785
4786 s->prefix = prefixes;
4787 s->aflag = aflag;
4788 s->dflag = dflag;
4789
4790 /* lock generation */
4791#ifndef VBOX
4792 if (prefixes & PREFIX_LOCK)
4793 tcg_gen_helper_0_0(helper_lock);
4794#else /* VBOX */
4795 if (prefixes & PREFIX_LOCK) {
4796 if (is_invalid_lock_sequence(s, pc_start, b)) {
4797 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4798 return s->pc;
4799 }
4800 tcg_gen_helper_0_0(helper_lock);
4801 }
4802#endif /* VBOX */
4803
4804 /* now check op code */
4805 reswitch:
4806 switch(b) {
4807 case 0x0f:
4808 /**************************/
4809 /* extended op code */
4810 b = ldub_code(s->pc++) | 0x100;
4811 goto reswitch;
4812
4813 /**************************/
4814 /* arith & logic */
4815 case 0x00 ... 0x05:
4816 case 0x08 ... 0x0d:
4817 case 0x10 ... 0x15:
4818 case 0x18 ... 0x1d:
4819 case 0x20 ... 0x25:
4820 case 0x28 ... 0x2d:
4821 case 0x30 ... 0x35:
4822 case 0x38 ... 0x3d:
4823 {
4824 int op, f, val;
4825 op = (b >> 3) & 7;
4826 f = (b >> 1) & 3;
4827
4828 if ((b & 1) == 0)
4829 ot = OT_BYTE;
4830 else
4831 ot = dflag + OT_WORD;
4832
4833 switch(f) {
4834 case 0: /* OP Ev, Gv */
4835 modrm = ldub_code(s->pc++);
4836 reg = ((modrm >> 3) & 7) | rex_r;
4837 mod = (modrm >> 6) & 3;
4838 rm = (modrm & 7) | REX_B(s);
4839 if (mod != 3) {
4840 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4841 opreg = OR_TMP0;
4842 } else if (op == OP_XORL && rm == reg) {
4843 xor_zero:
4844 /* xor reg, reg optimisation */
4845 gen_op_movl_T0_0();
4846 s->cc_op = CC_OP_LOGICB + ot;
4847 gen_op_mov_reg_T0(ot, reg);
4848 gen_op_update1_cc();
4849 break;
4850 } else {
4851 opreg = rm;
4852 }
4853 gen_op_mov_TN_reg(ot, 1, reg);
4854 gen_op(s, op, ot, opreg);
4855 break;
4856 case 1: /* OP Gv, Ev */
4857 modrm = ldub_code(s->pc++);
4858 mod = (modrm >> 6) & 3;
4859 reg = ((modrm >> 3) & 7) | rex_r;
4860 rm = (modrm & 7) | REX_B(s);
4861 if (mod != 3) {
4862 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4863 gen_op_ld_T1_A0(ot + s->mem_index);
4864 } else if (op == OP_XORL && rm == reg) {
4865 goto xor_zero;
4866 } else {
4867 gen_op_mov_TN_reg(ot, 1, rm);
4868 }
4869 gen_op(s, op, ot, reg);
4870 break;
4871 case 2: /* OP A, Iv */
4872 val = insn_get(s, ot);
4873 gen_op_movl_T1_im(val);
4874 gen_op(s, op, ot, OR_EAX);
4875 break;
4876 }
4877 }
4878 break;
4879
4880 case 0x82:
4881 if (CODE64(s))
4882 goto illegal_op;
4883 case 0x80: /* GRP1 */
4884 case 0x81:
4885 case 0x83:
4886 {
4887 int val;
4888
4889 if ((b & 1) == 0)
4890 ot = OT_BYTE;
4891 else
4892 ot = dflag + OT_WORD;
4893
4894 modrm = ldub_code(s->pc++);
4895 mod = (modrm >> 6) & 3;
4896 rm = (modrm & 7) | REX_B(s);
4897 op = (modrm >> 3) & 7;
4898
4899 if (mod != 3) {
4900 if (b == 0x83)
4901 s->rip_offset = 1;
4902 else
4903 s->rip_offset = insn_const_size(ot);
4904 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4905 opreg = OR_TMP0;
4906 } else {
4907 opreg = rm;
4908 }
4909
4910 switch(b) {
4911 default:
4912 case 0x80:
4913 case 0x81:
4914 case 0x82:
4915 val = insn_get(s, ot);
4916 break;
4917 case 0x83:
4918 val = (int8_t)insn_get(s, OT_BYTE);
4919 break;
4920 }
4921 gen_op_movl_T1_im(val);
4922 gen_op(s, op, ot, opreg);
4923 }
4924 break;
4925
4926 /**************************/
4927 /* inc, dec, and other misc arith */
4928 case 0x40 ... 0x47: /* inc Gv */
4929 ot = dflag ? OT_LONG : OT_WORD;
4930 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4931 break;
4932 case 0x48 ... 0x4f: /* dec Gv */
4933 ot = dflag ? OT_LONG : OT_WORD;
4934 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4935 break;
4936 case 0xf6: /* GRP3 */
4937 case 0xf7:
4938 if ((b & 1) == 0)
4939 ot = OT_BYTE;
4940 else
4941 ot = dflag + OT_WORD;
4942
4943 modrm = ldub_code(s->pc++);
4944 mod = (modrm >> 6) & 3;
4945 rm = (modrm & 7) | REX_B(s);
4946 op = (modrm >> 3) & 7;
4947 if (mod != 3) {
4948 if (op == 0)
4949 s->rip_offset = insn_const_size(ot);
4950 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4951 gen_op_ld_T0_A0(ot + s->mem_index);
4952 } else {
4953 gen_op_mov_TN_reg(ot, 0, rm);
4954 }
4955
4956 switch(op) {
4957 case 0: /* test */
4958 val = insn_get(s, ot);
4959 gen_op_movl_T1_im(val);
4960 gen_op_testl_T0_T1_cc();
4961 s->cc_op = CC_OP_LOGICB + ot;
4962 break;
4963 case 2: /* not */
4964 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4965 if (mod != 3) {
4966 gen_op_st_T0_A0(ot + s->mem_index);
4967 } else {
4968 gen_op_mov_reg_T0(ot, rm);
4969 }
4970 break;
4971 case 3: /* neg */
4972 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4973 if (mod != 3) {
4974 gen_op_st_T0_A0(ot + s->mem_index);
4975 } else {
4976 gen_op_mov_reg_T0(ot, rm);
4977 }
4978 gen_op_update_neg_cc();
4979 s->cc_op = CC_OP_SUBB + ot;
4980 break;
4981 case 4: /* mul */
4982 switch(ot) {
4983 case OT_BYTE:
4984 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4985 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4986 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4987 /* XXX: use 32 bit mul which could be faster */
4988 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4989 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4990 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4991 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
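                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00); /* see above */
                /* For MUL, CF and OF are set iff the upper half of the
                   product (here AH, kept in cc_src) is non-zero. */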
4992 s->cc_op = CC_OP_MULB;
4993 break;
4994 case OT_WORD:
4995 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4996 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4997 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4998 /* XXX: use 32 bit mul which could be faster */
4999 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5000 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5001 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5002 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5003 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5004 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5005 s->cc_op = CC_OP_MULW;
5006 break;
5007 default:
5008 case OT_LONG:
5009#ifdef TARGET_X86_64
5010 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5011 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
5012 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
5013 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5014 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5015 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5016 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5017 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5018 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5019#else
5020 {
5021 TCGv t0, t1;
5022 t0 = tcg_temp_new(TCG_TYPE_I64);
5023 t1 = tcg_temp_new(TCG_TYPE_I64);
5024 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5025 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5026 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5027 tcg_gen_mul_i64(t0, t0, t1);
5028 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5029 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5030 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5031 tcg_gen_shri_i64(t0, t0, 32);
5032 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5033 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5034 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5035 }
5036#endif
5037 s->cc_op = CC_OP_MULL;
5038 break;
5039#ifdef TARGET_X86_64
5040 case OT_QUAD:
5041 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5042 s->cc_op = CC_OP_MULQ;
5043 break;
5044#endif
5045 }
5046 break;
5047 case 5: /* imul */
5048 switch(ot) {
5049 case OT_BYTE:
5050 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5051 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5052 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5053 /* XXX: use 32 bit mul which could be faster */
5054 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5055 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5056 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5057 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5058 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
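                /* For IMUL, CF and OF are set when the product does not fit
                   the destination: cc_src = result minus its sign-extended
                   low half, which is non-zero exactly on overflow. */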
5059 s->cc_op = CC_OP_MULB;
5060 break;
5061 case OT_WORD:
5062 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5063 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5064 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5065 /* XXX: use 32 bit mul which could be faster */
5066 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5067 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5068 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5069 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5070 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5071 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5072 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5073 s->cc_op = CC_OP_MULW;
5074 break;
5075 default:
5076 case OT_LONG:
5077#ifdef TARGET_X86_64
5078 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5079 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5080 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5081 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5082 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5083 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5084 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5085 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5086 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5087 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5088#else
5089 {
5090 TCGv t0, t1;
5091 t0 = tcg_temp_new(TCG_TYPE_I64);
5092 t1 = tcg_temp_new(TCG_TYPE_I64);
5093 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5094 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5095 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5096 tcg_gen_mul_i64(t0, t0, t1);
5097 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5098 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5099 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5100 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5101 tcg_gen_shri_i64(t0, t0, 32);
5102 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5103 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5104 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5105 }
5106#endif
5107 s->cc_op = CC_OP_MULL;
5108 break;
5109#ifdef TARGET_X86_64
5110 case OT_QUAD:
5111 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5112 s->cc_op = CC_OP_MULQ;
5113 break;
5114#endif
5115 }
5116 break;
5117 case 6: /* div */
5118 switch(ot) {
5119 case OT_BYTE:
5120 gen_jmp_im(pc_start - s->cs_base);
5121 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5122 break;
5123 case OT_WORD:
5124 gen_jmp_im(pc_start - s->cs_base);
5125 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5126 break;
5127 default:
5128 case OT_LONG:
5129 gen_jmp_im(pc_start - s->cs_base);
5130 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5131 break;
5132#ifdef TARGET_X86_64
5133 case OT_QUAD:
5134 gen_jmp_im(pc_start - s->cs_base);
5135 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5136 break;
5137#endif
5138 }
5139 break;
5140 case 7: /* idiv */
5141 switch(ot) {
5142 case OT_BYTE:
5143 gen_jmp_im(pc_start - s->cs_base);
5144 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5145 break;
5146 case OT_WORD:
5147 gen_jmp_im(pc_start - s->cs_base);
5148 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5149 break;
5150 default:
5151 case OT_LONG:
5152 gen_jmp_im(pc_start - s->cs_base);
5153 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5154 break;
5155#ifdef TARGET_X86_64
5156 case OT_QUAD:
5157 gen_jmp_im(pc_start - s->cs_base);
5158 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5159 break;
5160#endif
5161 }
5162 break;
5163 default:
5164 goto illegal_op;
5165 }
5166 break;
5167
5168 case 0xfe: /* GRP4 */
5169 case 0xff: /* GRP5 */
5170 if ((b & 1) == 0)
5171 ot = OT_BYTE;
5172 else
5173 ot = dflag + OT_WORD;
5174
5175 modrm = ldub_code(s->pc++);
5176 mod = (modrm >> 6) & 3;
5177 rm = (modrm & 7) | REX_B(s);
5178 op = (modrm >> 3) & 7;
5179 if (op >= 2 && b == 0xfe) {
5180 goto illegal_op;
5181 }
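        /* GRP4 (0xfe) only defines /0 (inc) and /1 (dec); the remaining
           operations exist only in the GRP5 (0xff) encoding. */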
5182 if (CODE64(s)) {
5183 if (op == 2 || op == 4) {
5184 /* operand size for jumps is 64 bit */
5185 ot = OT_QUAD;
5186 } else if (op == 3 || op == 5) {
5187                /* for far calls and jumps, the operand is 16 or 32 bit,
5188                   even in long mode */
5189 ot = dflag ? OT_LONG : OT_WORD;
5190 } else if (op == 6) {
5191 /* default push size is 64 bit */
5192 ot = dflag ? OT_QUAD : OT_WORD;
5193 }
5194 }
5195 if (mod != 3) {
5196 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5197 if (op >= 2 && op != 3 && op != 5)
5198 gen_op_ld_T0_A0(ot + s->mem_index);
5199 } else {
5200 gen_op_mov_TN_reg(ot, 0, rm);
5201 }
5202
5203 switch(op) {
5204 case 0: /* inc Ev */
5205 if (mod != 3)
5206 opreg = OR_TMP0;
5207 else
5208 opreg = rm;
5209 gen_inc(s, ot, opreg, 1);
5210 break;
5211 case 1: /* dec Ev */
5212 if (mod != 3)
5213 opreg = OR_TMP0;
5214 else
5215 opreg = rm;
5216 gen_inc(s, ot, opreg, -1);
5217 break;
5218 case 2: /* call Ev */
5219 /* XXX: optimize if memory (no 'and' is necessary) */
5220#ifdef VBOX_WITH_CALL_RECORD
5221 if (s->record_call)
5222 gen_op_record_call();
5223#endif
5224 if (s->dflag == 0)
5225 gen_op_andl_T0_ffff();
5226 next_eip = s->pc - s->cs_base;
5227 gen_movtl_T1_im(next_eip);
5228 gen_push_T1(s);
5229 gen_op_jmp_T0();
5230 gen_eob(s);
5231 break;
5232 case 3: /* lcall Ev */
5233 gen_op_ld_T1_A0(ot + s->mem_index);
5234 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5235 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5236 do_lcall:
5237 if (s->pe && !s->vm86) {
5238 if (s->cc_op != CC_OP_DYNAMIC)
5239 gen_op_set_cc_op(s->cc_op);
5240 gen_jmp_im(pc_start - s->cs_base);
5241 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5242 tcg_gen_helper_0_4(helper_lcall_protected,
5243 cpu_tmp2_i32, cpu_T[1],
5244 tcg_const_i32(dflag),
5245 tcg_const_i32(s->pc - pc_start));
5246 } else {
5247 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5248 tcg_gen_helper_0_4(helper_lcall_real,
5249 cpu_tmp2_i32, cpu_T[1],
5250 tcg_const_i32(dflag),
5251 tcg_const_i32(s->pc - s->cs_base));
5252 }
5253 gen_eob(s);
5254 break;
5255 case 4: /* jmp Ev */
5256 if (s->dflag == 0)
5257 gen_op_andl_T0_ffff();
5258 gen_op_jmp_T0();
5259 gen_eob(s);
5260 break;
5261 case 5: /* ljmp Ev */
5262 gen_op_ld_T1_A0(ot + s->mem_index);
5263 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5264 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5265 do_ljmp:
5266 if (s->pe && !s->vm86) {
5267 if (s->cc_op != CC_OP_DYNAMIC)
5268 gen_op_set_cc_op(s->cc_op);
5269 gen_jmp_im(pc_start - s->cs_base);
5270 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5271 tcg_gen_helper_0_3(helper_ljmp_protected,
5272 cpu_tmp2_i32,
5273 cpu_T[1],
5274 tcg_const_i32(s->pc - pc_start));
5275 } else {
5276 gen_op_movl_seg_T0_vm(R_CS);
5277 gen_op_movl_T0_T1();
5278 gen_op_jmp_T0();
5279 }
5280 gen_eob(s);
5281 break;
5282 case 6: /* push Ev */
5283 gen_push_T0(s);
5284 break;
5285 default:
5286 goto illegal_op;
5287 }
5288 break;
5289
5290 case 0x84: /* test Ev, Gv */
5291 case 0x85:
5292 if ((b & 1) == 0)
5293 ot = OT_BYTE;
5294 else
5295 ot = dflag + OT_WORD;
5296
5297 modrm = ldub_code(s->pc++);
5298 mod = (modrm >> 6) & 3;
5299 rm = (modrm & 7) | REX_B(s);
5300 reg = ((modrm >> 3) & 7) | rex_r;
5301
5302 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5303 gen_op_mov_TN_reg(ot, 1, reg);
5304 gen_op_testl_T0_T1_cc();
5305 s->cc_op = CC_OP_LOGICB + ot;
5306 break;
5307
5308 case 0xa8: /* test eAX, Iv */
5309 case 0xa9:
5310 if ((b & 1) == 0)
5311 ot = OT_BYTE;
5312 else
5313 ot = dflag + OT_WORD;
5314 val = insn_get(s, ot);
5315
5316 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5317 gen_op_movl_T1_im(val);
5318 gen_op_testl_T0_T1_cc();
5319 s->cc_op = CC_OP_LOGICB + ot;
5320 break;
5321
5322 case 0x98: /* CWDE/CBW */
5323#ifdef TARGET_X86_64
5324 if (dflag == 2) {
5325 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5326 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5327 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5328 } else
5329#endif
5330 if (dflag == 1) {
5331 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5332 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5333 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5334 } else {
5335 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5336 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5337 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5338 }
5339 break;
5340 case 0x99: /* CDQ/CWD */
5341#ifdef TARGET_X86_64
5342 if (dflag == 2) {
5343 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5344 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5345 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5346 } else
5347#endif
5348 if (dflag == 1) {
5349 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5350 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5351 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5352 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5353 } else {
5354 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5355 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5356 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5357 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5358 }
5359 break;
5360 case 0x1af: /* imul Gv, Ev */
5361 case 0x69: /* imul Gv, Ev, I */
5362 case 0x6b:
5363 ot = dflag + OT_WORD;
5364 modrm = ldub_code(s->pc++);
5365 reg = ((modrm >> 3) & 7) | rex_r;
5366 if (b == 0x69)
5367 s->rip_offset = insn_const_size(ot);
5368 else if (b == 0x6b)
5369 s->rip_offset = 1;
5370 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5371 if (b == 0x69) {
5372 val = insn_get(s, ot);
5373 gen_op_movl_T1_im(val);
5374 } else if (b == 0x6b) {
5375 val = (int8_t)insn_get(s, OT_BYTE);
5376 gen_op_movl_T1_im(val);
5377 } else {
5378 gen_op_mov_TN_reg(ot, 1, reg);
5379 }
5380
5381#ifdef TARGET_X86_64
5382 if (ot == OT_QUAD) {
5383 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5384 } else
5385#endif
5386 if (ot == OT_LONG) {
5387#ifdef TARGET_X86_64
5388 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5389 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5390 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5391 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5392 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5393 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5394#else
5395 {
5396 TCGv t0, t1;
5397 t0 = tcg_temp_new(TCG_TYPE_I64);
5398 t1 = tcg_temp_new(TCG_TYPE_I64);
5399 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5400 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5401 tcg_gen_mul_i64(t0, t0, t1);
5402 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5403 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5404 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5405 tcg_gen_shri_i64(t0, t0, 32);
5406 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5407 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5408 }
5409#endif
5410 } else {
5411 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5412 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5413 /* XXX: use 32 bit mul which could be faster */
5414 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5415 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5416 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5417 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5418 }
5419 gen_op_mov_reg_T0(ot, reg);
5420 s->cc_op = CC_OP_MULB + ot;
5421 break;
5422 case 0x1c0:
5423 case 0x1c1: /* xadd Ev, Gv */
5424 if ((b & 1) == 0)
5425 ot = OT_BYTE;
5426 else
5427 ot = dflag + OT_WORD;
5428 modrm = ldub_code(s->pc++);
5429 reg = ((modrm >> 3) & 7) | rex_r;
5430 mod = (modrm >> 6) & 3;
5431 if (mod == 3) {
5432 rm = (modrm & 7) | REX_B(s);
5433 gen_op_mov_TN_reg(ot, 0, reg);
5434 gen_op_mov_TN_reg(ot, 1, rm);
5435 gen_op_addl_T0_T1();
5436 gen_op_mov_reg_T1(ot, reg);
5437 gen_op_mov_reg_T0(ot, rm);
5438 } else {
5439 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5440 gen_op_mov_TN_reg(ot, 0, reg);
5441 gen_op_ld_T1_A0(ot + s->mem_index);
5442 gen_op_addl_T0_T1();
5443 gen_op_st_T0_A0(ot + s->mem_index);
5444 gen_op_mov_reg_T1(ot, reg);
5445 }
5446 gen_op_update2_cc();
5447 s->cc_op = CC_OP_ADDB + ot;
5448 break;
5449 case 0x1b0:
5450 case 0x1b1: /* cmpxchg Ev, Gv */
5451 {
5452 int label1, label2;
5453 TCGv t0, t1, t2, a0;
5454
5455 if ((b & 1) == 0)
5456 ot = OT_BYTE;
5457 else
5458 ot = dflag + OT_WORD;
5459 modrm = ldub_code(s->pc++);
5460 reg = ((modrm >> 3) & 7) | rex_r;
5461 mod = (modrm >> 6) & 3;
5462 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5463 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5464 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5465 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5466 gen_op_mov_v_reg(ot, t1, reg);
5467 if (mod == 3) {
5468 rm = (modrm & 7) | REX_B(s);
5469 gen_op_mov_v_reg(ot, t0, rm);
5470 } else {
5471 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5472 tcg_gen_mov_tl(a0, cpu_A0);
5473 gen_op_ld_v(ot + s->mem_index, t0, a0);
5474 rm = 0; /* avoid warning */
5475 }
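            /* cmpxchg: compute t2 = EAX - dest and branch on equality below;
               CC_OP_SUBB + ot then derives ZF and the other flags from the
               same subtraction (cc_src = dest, cc_dst = difference). */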
5476 label1 = gen_new_label();
5477 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5478 tcg_gen_sub_tl(t2, t2, t0);
5479 gen_extu(ot, t2);
5480 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5481 if (mod == 3) {
5482 label2 = gen_new_label();
5483 gen_op_mov_reg_v(ot, R_EAX, t0);
5484 tcg_gen_br(label2);
5485 gen_set_label(label1);
5486 gen_op_mov_reg_v(ot, rm, t1);
5487 gen_set_label(label2);
5488 } else {
5489 tcg_gen_mov_tl(t1, t0);
5490 gen_op_mov_reg_v(ot, R_EAX, t0);
5491 gen_set_label(label1);
5492 /* always store */
5493 gen_op_st_v(ot + s->mem_index, t1, a0);
5494 }
5495 tcg_gen_mov_tl(cpu_cc_src, t0);
5496 tcg_gen_mov_tl(cpu_cc_dst, t2);
5497 s->cc_op = CC_OP_SUBB + ot;
5498 tcg_temp_free(t0);
5499 tcg_temp_free(t1);
5500 tcg_temp_free(t2);
5501 tcg_temp_free(a0);
5502 }
5503 break;
5504 case 0x1c7: /* cmpxchg8b */
5505 modrm = ldub_code(s->pc++);
5506 mod = (modrm >> 6) & 3;
5507 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5508 goto illegal_op;
5509#ifdef TARGET_X86_64
5510 if (dflag == 2) {
5511 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5512 goto illegal_op;
5513 gen_jmp_im(pc_start - s->cs_base);
5514 if (s->cc_op != CC_OP_DYNAMIC)
5515 gen_op_set_cc_op(s->cc_op);
5516 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5517 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5518 } else
5519#endif
5520 {
5521 if (!(s->cpuid_features & CPUID_CX8))
5522 goto illegal_op;
5523 gen_jmp_im(pc_start - s->cs_base);
5524 if (s->cc_op != CC_OP_DYNAMIC)
5525 gen_op_set_cc_op(s->cc_op);
5526 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5527 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5528 }
5529 s->cc_op = CC_OP_EFLAGS;
5530 break;
5531
5532 /**************************/
5533 /* push/pop */
5534 case 0x50 ... 0x57: /* push */
5535 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5536 gen_push_T0(s);
5537 break;
5538 case 0x58 ... 0x5f: /* pop */
5539 if (CODE64(s)) {
5540 ot = dflag ? OT_QUAD : OT_WORD;
5541 } else {
5542 ot = dflag + OT_WORD;
5543 }
5544 gen_pop_T0(s);
5545 /* NOTE: order is important for pop %sp */
5546 gen_pop_update(s);
5547 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5548 break;
5549 case 0x60: /* pusha */
5550 if (CODE64(s))
5551 goto illegal_op;
5552 gen_pusha(s);
5553 break;
5554 case 0x61: /* popa */
5555 if (CODE64(s))
5556 goto illegal_op;
5557 gen_popa(s);
5558 break;
5559 case 0x68: /* push Iv */
5560 case 0x6a:
5561 if (CODE64(s)) {
5562 ot = dflag ? OT_QUAD : OT_WORD;
5563 } else {
5564 ot = dflag + OT_WORD;
5565 }
5566 if (b == 0x68)
5567 val = insn_get(s, ot);
5568 else
5569 val = (int8_t)insn_get(s, OT_BYTE);
5570 gen_op_movl_T0_im(val);
5571 gen_push_T0(s);
5572 break;
5573 case 0x8f: /* pop Ev */
5574 if (CODE64(s)) {
5575 ot = dflag ? OT_QUAD : OT_WORD;
5576 } else {
5577 ot = dflag + OT_WORD;
5578 }
5579 modrm = ldub_code(s->pc++);
5580 mod = (modrm >> 6) & 3;
5581 gen_pop_T0(s);
5582 if (mod == 3) {
5583 /* NOTE: order is important for pop %sp */
5584 gen_pop_update(s);
5585 rm = (modrm & 7) | REX_B(s);
5586 gen_op_mov_reg_T0(ot, rm);
5587 } else {
5588 /* NOTE: order is important too for MMU exceptions */
5589 s->popl_esp_hack = 1 << ot;
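                /* popl_esp_hack biases ESP-relative effective addresses by
                   the operand size, so 'pop mem' sees the incremented stack
                   pointer during address generation, as on real hardware
                   (presumably consumed in gen_lea_modrm). */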
5590 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5591 s->popl_esp_hack = 0;
5592 gen_pop_update(s);
5593 }
5594 break;
5595 case 0xc8: /* enter */
5596 {
5597 int level;
5598 val = lduw_code(s->pc);
5599 s->pc += 2;
5600 level = ldub_code(s->pc++);
5601 gen_enter(s, val, level);
5602 }
5603 break;
5604 case 0xc9: /* leave */
5605 /* XXX: exception not precise (ESP is updated before potential exception) */
5606 if (CODE64(s)) {
5607 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5608 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5609 } else if (s->ss32) {
5610 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5611 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5612 } else {
5613 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5614 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5615 }
5616 gen_pop_T0(s);
5617 if (CODE64(s)) {
5618 ot = dflag ? OT_QUAD : OT_WORD;
5619 } else {
5620 ot = dflag + OT_WORD;
5621 }
5622 gen_op_mov_reg_T0(ot, R_EBP);
5623 gen_pop_update(s);
5624 break;
5625 case 0x06: /* push es */
5626 case 0x0e: /* push cs */
5627 case 0x16: /* push ss */
5628 case 0x1e: /* push ds */
5629 if (CODE64(s))
5630 goto illegal_op;
5631 gen_op_movl_T0_seg(b >> 3);
5632 gen_push_T0(s);
5633 break;
5634 case 0x1a0: /* push fs */
5635 case 0x1a8: /* push gs */
5636 gen_op_movl_T0_seg((b >> 3) & 7);
5637 gen_push_T0(s);
5638 break;
5639 case 0x07: /* pop es */
5640 case 0x17: /* pop ss */
5641 case 0x1f: /* pop ds */
5642 if (CODE64(s))
5643 goto illegal_op;
5644 reg = b >> 3;
5645 gen_pop_T0(s);
5646 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5647 gen_pop_update(s);
5648 if (reg == R_SS) {
5649 /* if reg == SS, inhibit interrupts/trace. */
5650 /* If several instructions disable interrupts, only the
5651 _first_ does it */
5652 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5653 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5654 s->tf = 0;
5655 }
5656 if (s->is_jmp) {
5657 gen_jmp_im(s->pc - s->cs_base);
5658 gen_eob(s);
5659 }
5660 break;
5661 case 0x1a1: /* pop fs */
5662 case 0x1a9: /* pop gs */
5663 gen_pop_T0(s);
5664 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5665 gen_pop_update(s);
5666 if (s->is_jmp) {
5667 gen_jmp_im(s->pc - s->cs_base);
5668 gen_eob(s);
5669 }
5670 break;
5671
5672 /**************************/
5673 /* mov */
5674 case 0x88:
5675 case 0x89: /* mov Gv, Ev */
5676 if ((b & 1) == 0)
5677 ot = OT_BYTE;
5678 else
5679 ot = dflag + OT_WORD;
5680 modrm = ldub_code(s->pc++);
5681 reg = ((modrm >> 3) & 7) | rex_r;
5682
5683 /* generate a generic store */
5684 gen_ldst_modrm(s, modrm, ot, reg, 1);
5685 break;
5686 case 0xc6:
5687 case 0xc7: /* mov Ev, Iv */
5688 if ((b & 1) == 0)
5689 ot = OT_BYTE;
5690 else
5691 ot = dflag + OT_WORD;
5692 modrm = ldub_code(s->pc++);
5693 mod = (modrm >> 6) & 3;
5694 if (mod != 3) {
5695 s->rip_offset = insn_const_size(ot);
5696 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5697 }
5698 val = insn_get(s, ot);
5699 gen_op_movl_T0_im(val);
5700 if (mod != 3)
5701 gen_op_st_T0_A0(ot + s->mem_index);
5702 else
5703 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5704 break;
5705 case 0x8a:
5706 case 0x8b: /* mov Ev, Gv */
5707#ifdef VBOX /* dtrace hot fix */
5708 if (prefixes & PREFIX_LOCK)
5709 goto illegal_op;
5710#endif
5711 if ((b & 1) == 0)
5712 ot = OT_BYTE;
5713 else
5714 ot = OT_WORD + dflag;
5715 modrm = ldub_code(s->pc++);
5716 reg = ((modrm >> 3) & 7) | rex_r;
5717
5718 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5719 gen_op_mov_reg_T0(ot, reg);
5720 break;
5721 case 0x8e: /* mov seg, Gv */
5722 modrm = ldub_code(s->pc++);
5723 reg = (modrm >> 3) & 7;
5724 if (reg >= 6 || reg == R_CS)
5725 goto illegal_op;
5726 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5727 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5728 if (reg == R_SS) {
5729 /* if reg == SS, inhibit interrupts/trace */
5730 /* If several instructions disable interrupts, only the
5731 _first_ does it */
5732 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5733 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5734 s->tf = 0;
5735 }
5736 if (s->is_jmp) {
5737 gen_jmp_im(s->pc - s->cs_base);
5738 gen_eob(s);
5739 }
5740 break;
5741 case 0x8c: /* mov Gv, seg */
5742 modrm = ldub_code(s->pc++);
5743 reg = (modrm >> 3) & 7;
5744 mod = (modrm >> 6) & 3;
5745 if (reg >= 6)
5746 goto illegal_op;
5747 gen_op_movl_T0_seg(reg);
5748 if (mod == 3)
5749 ot = OT_WORD + dflag;
5750 else
5751 ot = OT_WORD;
5752 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5753 break;
5754
5755 case 0x1b6: /* movzbS Gv, Eb */
5756 case 0x1b7: /* movzwS Gv, Eb */
5757 case 0x1be: /* movsbS Gv, Eb */
5758 case 0x1bf: /* movswS Gv, Eb */
5759 {
5760 int d_ot;
5761 /* d_ot is the size of destination */
5762 d_ot = dflag + OT_WORD;
5763 /* ot is the size of source */
5764 ot = (b & 1) + OT_BYTE;
5765 modrm = ldub_code(s->pc++);
5766 reg = ((modrm >> 3) & 7) | rex_r;
5767 mod = (modrm >> 6) & 3;
5768 rm = (modrm & 7) | REX_B(s);
5769
5770 if (mod == 3) {
5771 gen_op_mov_TN_reg(ot, 0, rm);
5772 switch(ot | (b & 8)) {
5773 case OT_BYTE:
5774 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5775 break;
5776 case OT_BYTE | 8:
5777 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5778 break;
5779 case OT_WORD:
5780 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5781 break;
5782 default:
5783 case OT_WORD | 8:
5784 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5785 break;
5786 }
5787 gen_op_mov_reg_T0(d_ot, reg);
5788 } else {
5789 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5790 if (b & 8) {
5791 gen_op_lds_T0_A0(ot + s->mem_index);
5792 } else {
5793 gen_op_ldu_T0_A0(ot + s->mem_index);
5794 }
5795 gen_op_mov_reg_T0(d_ot, reg);
5796 }
5797 }
5798 break;
5799
5800 case 0x8d: /* lea */
5801 ot = dflag + OT_WORD;
5802 modrm = ldub_code(s->pc++);
5803 mod = (modrm >> 6) & 3;
5804 if (mod == 3)
5805 goto illegal_op;
5806 reg = ((modrm >> 3) & 7) | rex_r;
5807 /* we must ensure that no segment is added */
5808 s->override = -1;
5809 val = s->addseg;
5810 s->addseg = 0;
5811 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5812 s->addseg = val;
5813 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5814 break;
5815
5816 case 0xa0: /* mov EAX, Ov */
5817 case 0xa1:
5818 case 0xa2: /* mov Ov, EAX */
5819 case 0xa3:
5820 {
5821 target_ulong offset_addr;
5822
5823 if ((b & 1) == 0)
5824 ot = OT_BYTE;
5825 else
5826 ot = dflag + OT_WORD;
5827#ifdef TARGET_X86_64
5828 if (s->aflag == 2) {
5829 offset_addr = ldq_code(s->pc);
5830 s->pc += 8;
5831 gen_op_movq_A0_im(offset_addr);
5832 } else
5833#endif
5834 {
5835 if (s->aflag) {
5836 offset_addr = insn_get(s, OT_LONG);
5837 } else {
5838 offset_addr = insn_get(s, OT_WORD);
5839 }
5840 gen_op_movl_A0_im(offset_addr);
5841 }
5842 gen_add_A0_ds_seg(s);
5843 if ((b & 2) == 0) {
5844 gen_op_ld_T0_A0(ot + s->mem_index);
5845 gen_op_mov_reg_T0(ot, R_EAX);
5846 } else {
5847 gen_op_mov_TN_reg(ot, 0, R_EAX);
5848 gen_op_st_T0_A0(ot + s->mem_index);
5849 }
5850 }
5851 break;
5852 case 0xd7: /* xlat */
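        /* xlat: AL = byte at DS:[rBX + zero-extended AL], segment
           overridable. */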
5853#ifdef TARGET_X86_64
5854 if (s->aflag == 2) {
5855 gen_op_movq_A0_reg(R_EBX);
5856 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5857 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5858 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5859 } else
5860#endif
5861 {
5862 gen_op_movl_A0_reg(R_EBX);
5863 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5864 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5865 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5866 if (s->aflag == 0)
5867 gen_op_andl_A0_ffff();
5868 else
5869 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5870 }
5871 gen_add_A0_ds_seg(s);
5872 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5873 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5874 break;
5875 case 0xb0 ... 0xb7: /* mov R, Ib */
5876 val = insn_get(s, OT_BYTE);
5877 gen_op_movl_T0_im(val);
5878 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5879 break;
5880 case 0xb8 ... 0xbf: /* mov R, Iv */
5881#ifdef TARGET_X86_64
5882 if (dflag == 2) {
5883 uint64_t tmp;
5884 /* 64 bit case */
5885 tmp = ldq_code(s->pc);
5886 s->pc += 8;
5887 reg = (b & 7) | REX_B(s);
5888 gen_movtl_T0_im(tmp);
5889 gen_op_mov_reg_T0(OT_QUAD, reg);
5890 } else
5891#endif
5892 {
5893 ot = dflag ? OT_LONG : OT_WORD;
5894 val = insn_get(s, ot);
5895 reg = (b & 7) | REX_B(s);
5896 gen_op_movl_T0_im(val);
5897 gen_op_mov_reg_T0(ot, reg);
5898 }
5899 break;
5900
5901 case 0x91 ... 0x97: /* xchg R, EAX */
5902 ot = dflag + OT_WORD;
5903 reg = (b & 7) | REX_B(s);
5904 rm = R_EAX;
5905 goto do_xchg_reg;
5906 case 0x86:
5907 case 0x87: /* xchg Ev, Gv */
5908 if ((b & 1) == 0)
5909 ot = OT_BYTE;
5910 else
5911 ot = dflag + OT_WORD;
5912 modrm = ldub_code(s->pc++);
5913 reg = ((modrm >> 3) & 7) | rex_r;
5914 mod = (modrm >> 6) & 3;
5915 if (mod == 3) {
5916 rm = (modrm & 7) | REX_B(s);
5917 do_xchg_reg:
5918 gen_op_mov_TN_reg(ot, 0, reg);
5919 gen_op_mov_TN_reg(ot, 1, rm);
5920 gen_op_mov_reg_T0(ot, rm);
5921 gen_op_mov_reg_T1(ot, reg);
5922 } else {
5923 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5924 gen_op_mov_TN_reg(ot, 0, reg);
5925 /* for xchg, lock is implicit */
5926 if (!(prefixes & PREFIX_LOCK))
5927 tcg_gen_helper_0_0(helper_lock);
5928 gen_op_ld_T1_A0(ot + s->mem_index);
5929 gen_op_st_T0_A0(ot + s->mem_index);
5930 if (!(prefixes & PREFIX_LOCK))
5931 tcg_gen_helper_0_0(helper_unlock);
5932 gen_op_mov_reg_T1(ot, reg);
5933 }
5934 break;
5935 case 0xc4: /* les Gv */
5936 if (CODE64(s))
5937 goto illegal_op;
5938 op = R_ES;
5939 goto do_lxx;
5940 case 0xc5: /* lds Gv */
5941 if (CODE64(s))
5942 goto illegal_op;
5943 op = R_DS;
5944 goto do_lxx;
5945 case 0x1b2: /* lss Gv */
5946 op = R_SS;
5947 goto do_lxx;
5948 case 0x1b4: /* lfs Gv */
5949 op = R_FS;
5950 goto do_lxx;
5951 case 0x1b5: /* lgs Gv */
5952 op = R_GS;
5953 do_lxx:
5954 ot = dflag ? OT_LONG : OT_WORD;
5955 modrm = ldub_code(s->pc++);
5956 reg = ((modrm >> 3) & 7) | rex_r;
5957 mod = (modrm >> 6) & 3;
5958 if (mod == 3)
5959 goto illegal_op;
5960 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5961 gen_op_ld_T1_A0(ot + s->mem_index);
5962 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5963 /* load the segment first to handle exceptions properly */
5964 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5965 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5966 /* then put the data */
5967 gen_op_mov_reg_T1(ot, reg);
5968 if (s->is_jmp) {
5969 gen_jmp_im(s->pc - s->cs_base);
5970 gen_eob(s);
5971 }
5972 break;
5973
5974 /************************/
5975 /* shifts */
5976 case 0xc0:
5977 case 0xc1:
5978 /* shift Ev,Ib */
5979 shift = 2;
5980 grp2:
5981 {
5982 if ((b & 1) == 0)
5983 ot = OT_BYTE;
5984 else
5985 ot = dflag + OT_WORD;
5986
5987 modrm = ldub_code(s->pc++);
5988 mod = (modrm >> 6) & 3;
5989 op = (modrm >> 3) & 7;
5990
5991 if (mod != 3) {
5992 if (shift == 2) {
5993 s->rip_offset = 1;
5994 }
5995 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5996 opreg = OR_TMP0;
5997 } else {
5998 opreg = (modrm & 7) | REX_B(s);
5999 }
6000
6001 /* simpler op */
6002 if (shift == 0) {
6003 gen_shift(s, op, ot, opreg, OR_ECX);
6004 } else {
6005 if (shift == 2) {
6006 shift = ldub_code(s->pc++);
6007 }
6008 gen_shifti(s, op, ot, opreg, shift);
6009 }
6010 }
6011 break;
6012 case 0xd0:
6013 case 0xd1:
6014 /* shift Ev,1 */
6015 shift = 1;
6016 goto grp2;
6017 case 0xd2:
6018 case 0xd3:
6019 /* shift Ev,cl */
6020 shift = 0;
6021 goto grp2;
6022
6023 case 0x1a4: /* shld imm */
6024 op = 0;
6025 shift = 1;
6026 goto do_shiftd;
6027 case 0x1a5: /* shld cl */
6028 op = 0;
6029 shift = 0;
6030 goto do_shiftd;
6031 case 0x1ac: /* shrd imm */
6032 op = 1;
6033 shift = 1;
6034 goto do_shiftd;
6035 case 0x1ad: /* shrd cl */
6036 op = 1;
6037 shift = 0;
6038 do_shiftd:
6039 ot = dflag + OT_WORD;
6040 modrm = ldub_code(s->pc++);
6041 mod = (modrm >> 6) & 3;
6042 rm = (modrm & 7) | REX_B(s);
6043 reg = ((modrm >> 3) & 7) | rex_r;
6044 if (mod != 3) {
6045 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6046 opreg = OR_TMP0;
6047 } else {
6048 opreg = rm;
6049 }
6050 gen_op_mov_TN_reg(ot, 1, reg);
6051
6052 if (shift) {
6053 val = ldub_code(s->pc++);
6054 tcg_gen_movi_tl(cpu_T3, val);
6055 } else {
6056 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6057 }
6058 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6059 break;
6060
6061 /************************/
6062 /* floats */
6063 case 0xd8 ... 0xdf:
6064 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6065             /* if CR0.EM or CR0.TS is set, generate an FPU exception */
6066 /* XXX: what to do if illegal op ? */
6067 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6068 break;
6069 }
6070 modrm = ldub_code(s->pc++);
6071 mod = (modrm >> 6) & 3;
6072 rm = modrm & 7;
6073 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
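        /* Fold the low 3 opcode bits (0xd8..0xdf) and the modrm reg field
           into a 6-bit FPU op index: for the memory forms below, 'op >> 4'
           selects the operand format group and 'op & 7' the operation. */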
6074 if (mod != 3) {
6075 /* memory op */
6076 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6077 switch(op) {
6078 case 0x00 ... 0x07: /* fxxxs */
6079 case 0x10 ... 0x17: /* fixxxl */
6080 case 0x20 ... 0x27: /* fxxxl */
6081 case 0x30 ... 0x37: /* fixxx */
6082 {
6083 int op1;
6084 op1 = op & 7;
6085
6086 switch(op >> 4) {
6087 case 0:
6088 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6089 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6090 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6091 break;
6092 case 1:
6093 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6094 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6095 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6096 break;
6097 case 2:
6098 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6099 (s->mem_index >> 2) - 1);
6100 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6101 break;
6102 case 3:
6103 default:
6104 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6105 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6106 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6107 break;
6108 }
6109
6110 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6111 if (op1 == 3) {
6112 /* fcomp needs pop */
6113 tcg_gen_helper_0_0(helper_fpop);
6114 }
6115 }
6116 break;
6117 case 0x08: /* flds */
6118 case 0x0a: /* fsts */
6119 case 0x0b: /* fstps */
6120 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6121 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6122 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6123 switch(op & 7) {
6124 case 0:
6125 switch(op >> 4) {
6126 case 0:
6127 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6128 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6129 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6130 break;
6131 case 1:
6132 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6133 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6134 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6135 break;
6136 case 2:
6137 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6138 (s->mem_index >> 2) - 1);
6139 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6140 break;
6141 case 3:
6142 default:
6143 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6144 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6145 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6146 break;
6147 }
6148 break;
6149 case 1:
6150 /* XXX: the corresponding CPUID bit must be tested ! */
6151 switch(op >> 4) {
6152 case 1:
6153 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6154 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6155 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6156 break;
6157 case 2:
6158 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6159 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6160 (s->mem_index >> 2) - 1);
6161 break;
6162 case 3:
6163 default:
6164 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6165 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6166 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6167 break;
6168 }
6169 tcg_gen_helper_0_0(helper_fpop);
6170 break;
6171 default:
6172 switch(op >> 4) {
6173 case 0:
6174 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6175 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6176 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6177 break;
6178 case 1:
6179 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6180 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6181 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6182 break;
6183 case 2:
6184 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6185 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6186 (s->mem_index >> 2) - 1);
6187 break;
6188 case 3:
6189 default:
6190 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6191 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6192 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6193 break;
6194 }
6195 if ((op & 7) == 3)
6196 tcg_gen_helper_0_0(helper_fpop);
6197 break;
6198 }
6199 break;
6200 case 0x0c: /* fldenv mem */
6201 if (s->cc_op != CC_OP_DYNAMIC)
6202 gen_op_set_cc_op(s->cc_op);
6203 gen_jmp_im(pc_start - s->cs_base);
6204 tcg_gen_helper_0_2(helper_fldenv,
6205 cpu_A0, tcg_const_i32(s->dflag));
6206 break;
6207 case 0x0d: /* fldcw mem */
6208 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6209 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6210 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6211 break;
6212 case 0x0e: /* fnstenv mem */
6213 if (s->cc_op != CC_OP_DYNAMIC)
6214 gen_op_set_cc_op(s->cc_op);
6215 gen_jmp_im(pc_start - s->cs_base);
6216 tcg_gen_helper_0_2(helper_fstenv,
6217 cpu_A0, tcg_const_i32(s->dflag));
6218 break;
6219 case 0x0f: /* fnstcw mem */
6220 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6221 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6222 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6223 break;
6224 case 0x1d: /* fldt mem */
6225 if (s->cc_op != CC_OP_DYNAMIC)
6226 gen_op_set_cc_op(s->cc_op);
6227 gen_jmp_im(pc_start - s->cs_base);
6228 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6229 break;
6230 case 0x1f: /* fstpt mem */
6231 if (s->cc_op != CC_OP_DYNAMIC)
6232 gen_op_set_cc_op(s->cc_op);
6233 gen_jmp_im(pc_start - s->cs_base);
6234 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6235 tcg_gen_helper_0_0(helper_fpop);
6236 break;
6237 case 0x2c: /* frstor mem */
6238 if (s->cc_op != CC_OP_DYNAMIC)
6239 gen_op_set_cc_op(s->cc_op);
6240 gen_jmp_im(pc_start - s->cs_base);
6241 tcg_gen_helper_0_2(helper_frstor,
6242 cpu_A0, tcg_const_i32(s->dflag));
6243 break;
6244 case 0x2e: /* fnsave mem */
6245 if (s->cc_op != CC_OP_DYNAMIC)
6246 gen_op_set_cc_op(s->cc_op);
6247 gen_jmp_im(pc_start - s->cs_base);
6248 tcg_gen_helper_0_2(helper_fsave,
6249 cpu_A0, tcg_const_i32(s->dflag));
6250 break;
6251 case 0x2f: /* fnstsw mem */
6252 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6253 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6254 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6255 break;
6256 case 0x3c: /* fbld */
6257 if (s->cc_op != CC_OP_DYNAMIC)
6258 gen_op_set_cc_op(s->cc_op);
6259 gen_jmp_im(pc_start - s->cs_base);
6260 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6261 break;
6262 case 0x3e: /* fbstp */
6263 if (s->cc_op != CC_OP_DYNAMIC)
6264 gen_op_set_cc_op(s->cc_op);
6265 gen_jmp_im(pc_start - s->cs_base);
6266 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6267 tcg_gen_helper_0_0(helper_fpop);
6268 break;
6269 case 0x3d: /* fildll */
6270 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6271 (s->mem_index >> 2) - 1);
6272 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6273 break;
6274 case 0x3f: /* fistpll */
6275 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6276 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6277 (s->mem_index >> 2) - 1);
6278 tcg_gen_helper_0_0(helper_fpop);
6279 break;
6280 default:
6281 goto illegal_op;
6282 }
6283 } else {
6284 /* register float ops */
6285 opreg = rm;
6286
6287 switch(op) {
6288 case 0x08: /* fld sti */
6289 tcg_gen_helper_0_0(helper_fpush);
6290 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6291 break;
6292 case 0x09: /* fxchg sti */
6293 case 0x29: /* fxchg4 sti, undocumented op */
6294 case 0x39: /* fxchg7 sti, undocumented op */
6295 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6296 break;
6297 case 0x0a: /* grp d9/2 */
6298 switch(rm) {
6299 case 0: /* fnop */
6300 /* check exceptions (FreeBSD FPU probe) */
6301 if (s->cc_op != CC_OP_DYNAMIC)
6302 gen_op_set_cc_op(s->cc_op);
6303 gen_jmp_im(pc_start - s->cs_base);
6304 tcg_gen_helper_0_0(helper_fwait);
6305 break;
6306 default:
6307 goto illegal_op;
6308 }
6309 break;
6310 case 0x0c: /* grp d9/4 */
6311 switch(rm) {
6312 case 0: /* fchs */
6313 tcg_gen_helper_0_0(helper_fchs_ST0);
6314 break;
6315 case 1: /* fabs */
6316 tcg_gen_helper_0_0(helper_fabs_ST0);
6317 break;
6318 case 4: /* ftst */
6319 tcg_gen_helper_0_0(helper_fldz_FT0);
6320 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6321 break;
6322 case 5: /* fxam */
6323 tcg_gen_helper_0_0(helper_fxam_ST0);
6324 break;
6325 default:
6326 goto illegal_op;
6327 }
6328 break;
6329 case 0x0d: /* grp d9/5 */
6330 {
6331 switch(rm) {
6332 case 0:
6333 tcg_gen_helper_0_0(helper_fpush);
6334 tcg_gen_helper_0_0(helper_fld1_ST0);
6335 break;
6336 case 1:
6337 tcg_gen_helper_0_0(helper_fpush);
6338 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6339 break;
6340 case 2:
6341 tcg_gen_helper_0_0(helper_fpush);
6342 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6343 break;
6344 case 3:
6345 tcg_gen_helper_0_0(helper_fpush);
6346 tcg_gen_helper_0_0(helper_fldpi_ST0);
6347 break;
6348 case 4:
6349 tcg_gen_helper_0_0(helper_fpush);
6350 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6351 break;
6352 case 5:
6353 tcg_gen_helper_0_0(helper_fpush);
6354 tcg_gen_helper_0_0(helper_fldln2_ST0);
6355 break;
6356 case 6:
6357 tcg_gen_helper_0_0(helper_fpush);
6358 tcg_gen_helper_0_0(helper_fldz_ST0);
6359 break;
6360 default:
6361 goto illegal_op;
6362 }
6363 }
6364 break;
6365 case 0x0e: /* grp d9/6 */
6366 switch(rm) {
6367 case 0: /* f2xm1 */
6368 tcg_gen_helper_0_0(helper_f2xm1);
6369 break;
6370 case 1: /* fyl2x */
6371 tcg_gen_helper_0_0(helper_fyl2x);
6372 break;
6373 case 2: /* fptan */
6374 tcg_gen_helper_0_0(helper_fptan);
6375 break;
6376 case 3: /* fpatan */
6377 tcg_gen_helper_0_0(helper_fpatan);
6378 break;
6379 case 4: /* fxtract */
6380 tcg_gen_helper_0_0(helper_fxtract);
6381 break;
6382 case 5: /* fprem1 */
6383 tcg_gen_helper_0_0(helper_fprem1);
6384 break;
6385 case 6: /* fdecstp */
6386 tcg_gen_helper_0_0(helper_fdecstp);
6387 break;
6388 default:
6389 case 7: /* fincstp */
6390 tcg_gen_helper_0_0(helper_fincstp);
6391 break;
6392 }
6393 break;
6394 case 0x0f: /* grp d9/7 */
6395 switch(rm) {
6396 case 0: /* fprem */
6397 tcg_gen_helper_0_0(helper_fprem);
6398 break;
6399 case 1: /* fyl2xp1 */
6400 tcg_gen_helper_0_0(helper_fyl2xp1);
6401 break;
6402 case 2: /* fsqrt */
6403 tcg_gen_helper_0_0(helper_fsqrt);
6404 break;
6405 case 3: /* fsincos */
6406 tcg_gen_helper_0_0(helper_fsincos);
6407 break;
6408 case 5: /* fscale */
6409 tcg_gen_helper_0_0(helper_fscale);
6410 break;
6411 case 4: /* frndint */
6412 tcg_gen_helper_0_0(helper_frndint);
6413 break;
6414 case 6: /* fsin */
6415 tcg_gen_helper_0_0(helper_fsin);
6416 break;
6417 default:
6418 case 7: /* fcos */
6419 tcg_gen_helper_0_0(helper_fcos);
6420 break;
6421 }
6422 break;
6423 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6424 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6425 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6426 {
6427 int op1;
6428
6429 op1 = op & 7;
6430 if (op >= 0x20) {
6431 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6432 if (op >= 0x30)
6433 tcg_gen_helper_0_0(helper_fpop);
6434 } else {
6435 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6436 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6437 }
6438 }
6439 break;
6440 case 0x02: /* fcom */
6441 case 0x22: /* fcom2, undocumented op */
6442 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6443 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6444 break;
6445 case 0x03: /* fcomp */
6446 case 0x23: /* fcomp3, undocumented op */
6447 case 0x32: /* fcomp5, undocumented op */
6448 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6449 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6450 tcg_gen_helper_0_0(helper_fpop);
6451 break;
6452 case 0x15: /* da/5 */
6453 switch(rm) {
6454 case 1: /* fucompp */
6455 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6456 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6457 tcg_gen_helper_0_0(helper_fpop);
6458 tcg_gen_helper_0_0(helper_fpop);
6459 break;
6460 default:
6461 goto illegal_op;
6462 }
6463 break;
6464 case 0x1c:
6465 switch(rm) {
6466 case 0: /* feni (287 only, just do nop here) */
6467 break;
6468 case 1: /* fdisi (287 only, just do nop here) */
6469 break;
6470 case 2: /* fclex */
6471 tcg_gen_helper_0_0(helper_fclex);
6472 break;
6473 case 3: /* fninit */
6474 tcg_gen_helper_0_0(helper_fninit);
6475 break;
6476 case 4: /* fsetpm (287 only, just do nop here) */
6477 break;
6478 default:
6479 goto illegal_op;
6480 }
6481 break;
6482 case 0x1d: /* fucomi */
6483 if (s->cc_op != CC_OP_DYNAMIC)
6484 gen_op_set_cc_op(s->cc_op);
6485 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6486 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6487 s->cc_op = CC_OP_EFLAGS;
6488 break;
6489 case 0x1e: /* fcomi */
6490 if (s->cc_op != CC_OP_DYNAMIC)
6491 gen_op_set_cc_op(s->cc_op);
6492 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6493 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6494 s->cc_op = CC_OP_EFLAGS;
6495 break;
6496 case 0x28: /* ffree sti */
6497 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6498 break;
6499 case 0x2a: /* fst sti */
6500 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6501 break;
6502 case 0x2b: /* fstp sti */
6503 case 0x0b: /* fstp1 sti, undocumented op */
6504 case 0x3a: /* fstp8 sti, undocumented op */
6505 case 0x3b: /* fstp9 sti, undocumented op */
6506 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6507 tcg_gen_helper_0_0(helper_fpop);
6508 break;
6509 case 0x2c: /* fucom st(i) */
6510 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6511 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6512 break;
6513 case 0x2d: /* fucomp st(i) */
6514 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6515 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6516 tcg_gen_helper_0_0(helper_fpop);
6517 break;
6518 case 0x33: /* de/3 */
6519 switch(rm) {
6520 case 1: /* fcompp */
6521 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6522 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6523 tcg_gen_helper_0_0(helper_fpop);
6524 tcg_gen_helper_0_0(helper_fpop);
6525 break;
6526 default:
6527 goto illegal_op;
6528 }
6529 break;
6530 case 0x38: /* ffreep sti, undocumented op */
6531 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6532 tcg_gen_helper_0_0(helper_fpop);
6533 break;
6534 case 0x3c: /* df/4 */
6535 switch(rm) {
6536 case 0:
6537 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6538 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6539 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6540 break;
6541 default:
6542 goto illegal_op;
6543 }
6544 break;
6545 case 0x3d: /* fucomip */
6546 if (s->cc_op != CC_OP_DYNAMIC)
6547 gen_op_set_cc_op(s->cc_op);
6548 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6549 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6550 tcg_gen_helper_0_0(helper_fpop);
6551 s->cc_op = CC_OP_EFLAGS;
6552 break;
6553 case 0x3e: /* fcomip */
6554 if (s->cc_op != CC_OP_DYNAMIC)
6555 gen_op_set_cc_op(s->cc_op);
6556 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6557 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6558 tcg_gen_helper_0_0(helper_fpop);
6559 s->cc_op = CC_OP_EFLAGS;
6560 break;
6561 case 0x10 ... 0x13: /* fcmovxx */
6562 case 0x18 ... 0x1b:
6563 {
6564 int op1, l1;
6565 static const uint8_t fcmov_cc[8] = {
6566 (JCC_B << 1),
6567 (JCC_Z << 1),
6568 (JCC_BE << 1),
6569 (JCC_P << 1),
6570 };
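                    /* note: the low two opcode bits index the base condition
                       above; bit 3 of 'op' selects the negated form (fcmovnb
                       etc.). gen_jcc1 branches to l1, skipping the fmov,
                       when the move condition does not hold */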
6571 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6572 l1 = gen_new_label();
6573 gen_jcc1(s, s->cc_op, op1, l1);
6574 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6575 gen_set_label(l1);
6576 }
6577 break;
6578 default:
6579 goto illegal_op;
6580 }
6581 }
6582 break;
6583 /************************/
6584 /* string ops */
6585
6586 case 0xa4: /* movsS */
6587 case 0xa5:
6588 if ((b & 1) == 0)
6589 ot = OT_BYTE;
6590 else
6591 ot = dflag + OT_WORD;
6592
6593 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6594 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6595 } else {
6596 gen_movs(s, ot);
6597 }
6598 break;
6599
6600 case 0xaa: /* stosS */
6601 case 0xab:
6602 if ((b & 1) == 0)
6603 ot = OT_BYTE;
6604 else
6605 ot = dflag + OT_WORD;
6606
6607 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6608 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6609 } else {
6610 gen_stos(s, ot);
6611 }
6612 break;
6613 case 0xac: /* lodsS */
6614 case 0xad:
6615 if ((b & 1) == 0)
6616 ot = OT_BYTE;
6617 else
6618 ot = dflag + OT_WORD;
6619 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6620 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6621 } else {
6622 gen_lods(s, ot);
6623 }
6624 break;
6625 case 0xae: /* scasS */
6626 case 0xaf:
6627 if ((b & 1) == 0)
6628 ot = OT_BYTE;
6629 else
6630 ot = dflag + OT_WORD;
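        /* note: for scas/cmps the trailing 1/0 argument selects repnz vs
           repz termination on ZF; movs/stos/lods above treat both prefixes
           identically since they do not affect the flags */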
6631 if (prefixes & PREFIX_REPNZ) {
6632 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6633 } else if (prefixes & PREFIX_REPZ) {
6634 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6635 } else {
6636 gen_scas(s, ot);
6637 s->cc_op = CC_OP_SUBB + ot;
6638 }
6639 break;
6640
6641 case 0xa6: /* cmpsS */
6642 case 0xa7:
6643 if ((b & 1) == 0)
6644 ot = OT_BYTE;
6645 else
6646 ot = dflag + OT_WORD;
6647 if (prefixes & PREFIX_REPNZ) {
6648 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6649 } else if (prefixes & PREFIX_REPZ) {
6650 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6651 } else {
6652 gen_cmps(s, ot);
6653 s->cc_op = CC_OP_SUBB + ot;
6654 }
6655 break;
6656 case 0x6c: /* insS */
6657 case 0x6d:
6658 if ((b & 1) == 0)
6659 ot = OT_BYTE;
6660 else
6661 ot = dflag ? OT_LONG : OT_WORD;
6662 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6663 gen_op_andl_T0_ffff();
6664 gen_check_io(s, ot, pc_start - s->cs_base,
6665 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6666 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6667 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6668 } else {
6669 gen_ins(s, ot);
6670 if (use_icount) {
6671 gen_jmp(s, s->pc - s->cs_base);
6672 }
6673 }
6674 break;
6675 case 0x6e: /* outsS */
6676 case 0x6f:
6677 if ((b & 1) == 0)
6678 ot = OT_BYTE;
6679 else
6680 ot = dflag ? OT_LONG : OT_WORD;
6681 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6682 gen_op_andl_T0_ffff();
6683 gen_check_io(s, ot, pc_start - s->cs_base,
6684 svm_is_rep(prefixes) | 4);
6685 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6686 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6687 } else {
6688 gen_outs(s, ot);
6689 if (use_icount) {
6690 gen_jmp(s, s->pc - s->cs_base);
6691 }
6692 }
6693 break;
6694
6695 /************************/
6696 /* port I/O */
6697
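    /* note: when icount is enabled, gen_io_start/gen_io_end must bracket
       the I/O helper and the insn has to terminate the TB (gen_jmp), so
       the instruction counter stays exact across the access */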
6698 case 0xe4:
6699 case 0xe5:
6700 if ((b & 1) == 0)
6701 ot = OT_BYTE;
6702 else
6703 ot = dflag ? OT_LONG : OT_WORD;
6704 val = ldub_code(s->pc++);
6705 gen_op_movl_T0_im(val);
6706 gen_check_io(s, ot, pc_start - s->cs_base,
6707 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6708 if (use_icount)
6709 gen_io_start();
6710 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6711 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6712 gen_op_mov_reg_T1(ot, R_EAX);
6713 if (use_icount) {
6714 gen_io_end();
6715 gen_jmp(s, s->pc - s->cs_base);
6716 }
6717 break;
6718 case 0xe6:
6719 case 0xe7:
6720 if ((b & 1) == 0)
6721 ot = OT_BYTE;
6722 else
6723 ot = dflag ? OT_LONG : OT_WORD;
6724 val = ldub_code(s->pc++);
6725 gen_op_movl_T0_im(val);
6726 gen_check_io(s, ot, pc_start - s->cs_base,
6727 svm_is_rep(prefixes));
6728#ifdef VBOX /* bird: Linux writes to this port to delay I/O. */
6729 if (val == 0x80)
6730 break;
6731#endif /* VBOX */
6732 gen_op_mov_TN_reg(ot, 1, R_EAX);
6733
6734 if (use_icount)
6735 gen_io_start();
6736 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6737 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6738 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6739 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6740 if (use_icount) {
6741 gen_io_end();
6742 gen_jmp(s, s->pc - s->cs_base);
6743 }
6744 break;
6745 case 0xec:
6746 case 0xed:
6747 if ((b & 1) == 0)
6748 ot = OT_BYTE;
6749 else
6750 ot = dflag ? OT_LONG : OT_WORD;
6751 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6752 gen_op_andl_T0_ffff();
6753 gen_check_io(s, ot, pc_start - s->cs_base,
6754 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6755 if (use_icount)
6756 gen_io_start();
6757 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6758 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6759 gen_op_mov_reg_T1(ot, R_EAX);
6760 if (use_icount) {
6761 gen_io_end();
6762 gen_jmp(s, s->pc - s->cs_base);
6763 }
6764 break;
6765 case 0xee:
6766 case 0xef:
6767 if ((b & 1) == 0)
6768 ot = OT_BYTE;
6769 else
6770 ot = dflag ? OT_LONG : OT_WORD;
6771 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6772 gen_op_andl_T0_ffff();
6773 gen_check_io(s, ot, pc_start - s->cs_base,
6774 svm_is_rep(prefixes));
6775 gen_op_mov_TN_reg(ot, 1, R_EAX);
6776
6777 if (use_icount)
6778 gen_io_start();
6779 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6780 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6781 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6782 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6783 if (use_icount) {
6784 gen_io_end();
6785 gen_jmp(s, s->pc - s->cs_base);
6786 }
6787 break;
6788
6789 /************************/
6790 /* control */
6791 case 0xc2: /* ret im */
6792 val = ldsw_code(s->pc);
6793 s->pc += 2;
6794 gen_pop_T0(s);
6795 if (CODE64(s) && s->dflag)
6796 s->dflag = 2;
6797 gen_stack_update(s, val + (2 << s->dflag));
6798 if (s->dflag == 0)
6799 gen_op_andl_T0_ffff();
6800 gen_op_jmp_T0();
6801 gen_eob(s);
6802 break;
6803 case 0xc3: /* ret */
6804 gen_pop_T0(s);
6805 gen_pop_update(s);
6806 if (s->dflag == 0)
6807 gen_op_andl_T0_ffff();
6808 gen_op_jmp_T0();
6809 gen_eob(s);
6810 break;
6811 case 0xca: /* lret im */
6812 val = ldsw_code(s->pc);
6813 s->pc += 2;
6814 do_lret:
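        /* note: in protected mode the far return goes through a helper
           (selector checks, possible privilege change); in real/vm86 mode
           EIP and CS are simply popped off the stack inline */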
6815 if (s->pe && !s->vm86) {
6816 if (s->cc_op != CC_OP_DYNAMIC)
6817 gen_op_set_cc_op(s->cc_op);
6818 gen_jmp_im(pc_start - s->cs_base);
6819 tcg_gen_helper_0_2(helper_lret_protected,
6820 tcg_const_i32(s->dflag),
6821 tcg_const_i32(val));
6822 } else {
6823 gen_stack_A0(s);
6824 /* pop offset */
6825 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6826 if (s->dflag == 0)
6827 gen_op_andl_T0_ffff();
6828 /* NOTE: keeping EIP updated is not a problem in case of
6829 exception */
6830 gen_op_jmp_T0();
6831 /* pop selector */
6832 gen_op_addl_A0_im(2 << s->dflag);
6833 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6834 gen_op_movl_seg_T0_vm(R_CS);
6835 /* add stack offset */
6836 gen_stack_update(s, val + (4 << s->dflag));
6837 }
6838 gen_eob(s);
6839 break;
6840 case 0xcb: /* lret */
6841 val = 0;
6842 goto do_lret;
6843 case 0xcf: /* iret */
6844 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6845 if (!s->pe) {
6846 /* real mode */
6847 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6848 s->cc_op = CC_OP_EFLAGS;
6849 } else if (s->vm86) {
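            /* note: the VBox change below allows a 16-bit iret in V86 mode
               at IOPL < 3 when CR4.VME is set (virtual interrupt flag
               emulation); a 32-bit operand size still raises #GP */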
6850#ifdef VBOX
6851 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6852#else
6853 if (s->iopl != 3) {
6854#endif
6855 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6856 } else {
6857 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6858 s->cc_op = CC_OP_EFLAGS;
6859 }
6860 } else {
6861 if (s->cc_op != CC_OP_DYNAMIC)
6862 gen_op_set_cc_op(s->cc_op);
6863 gen_jmp_im(pc_start - s->cs_base);
6864 tcg_gen_helper_0_2(helper_iret_protected,
6865 tcg_const_i32(s->dflag),
6866 tcg_const_i32(s->pc - s->cs_base));
6867 s->cc_op = CC_OP_EFLAGS;
6868 }
6869 gen_eob(s);
6870 break;
6871 case 0xe8: /* call im */
6872 {
6873 if (dflag)
6874 tval = (int32_t)insn_get(s, OT_LONG);
6875 else
6876 tval = (int16_t)insn_get(s, OT_WORD);
6877 next_eip = s->pc - s->cs_base;
6878 tval += next_eip;
6879 if (s->dflag == 0)
6880 tval &= 0xffff;
6881 gen_movtl_T0_im(next_eip);
6882 gen_push_T0(s);
6883 gen_jmp(s, tval);
6884 }
6885 break;
6886 case 0x9a: /* lcall im */
6887 {
6888 unsigned int selector, offset;
6889
6890 if (CODE64(s))
6891 goto illegal_op;
6892 ot = dflag ? OT_LONG : OT_WORD;
6893 offset = insn_get(s, ot);
6894 selector = insn_get(s, OT_WORD);
6895
6896 gen_op_movl_T0_im(selector);
6897 gen_op_movl_T1_imu(offset);
6898 }
6899 goto do_lcall;
6900 case 0xe9: /* jmp im */
6901 if (dflag)
6902 tval = (int32_t)insn_get(s, OT_LONG);
6903 else
6904 tval = (int16_t)insn_get(s, OT_WORD);
6905 tval += s->pc - s->cs_base;
6906 if (s->dflag == 0)
6907 tval &= 0xffff;
6908 else if(!CODE64(s))
6909 tval &= 0xffffffff;
6910 gen_jmp(s, tval);
6911 break;
6912 case 0xea: /* ljmp im */
6913 {
6914 unsigned int selector, offset;
6915
6916 if (CODE64(s))
6917 goto illegal_op;
6918 ot = dflag ? OT_LONG : OT_WORD;
6919 offset = insn_get(s, ot);
6920 selector = insn_get(s, OT_WORD);
6921
6922 gen_op_movl_T0_im(selector);
6923 gen_op_movl_T1_imu(offset);
6924 }
6925 goto do_ljmp;
6926 case 0xeb: /* jmp Jb */
6927 tval = (int8_t)insn_get(s, OT_BYTE);
6928 tval += s->pc - s->cs_base;
6929 if (s->dflag == 0)
6930 tval &= 0xffff;
6931 gen_jmp(s, tval);
6932 break;
6933 case 0x70 ... 0x7f: /* jcc Jb */
6934 tval = (int8_t)insn_get(s, OT_BYTE);
6935 goto do_jcc;
6936 case 0x180 ... 0x18f: /* jcc Jv */
6937 if (dflag) {
6938 tval = (int32_t)insn_get(s, OT_LONG);
6939 } else {
6940 tval = (int16_t)insn_get(s, OT_WORD);
6941 }
6942 do_jcc:
6943 next_eip = s->pc - s->cs_base;
6944 tval += next_eip;
6945 if (s->dflag == 0)
6946 tval &= 0xffff;
6947 gen_jcc(s, b, tval, next_eip);
6948 break;
6949
6950 case 0x190 ... 0x19f: /* setcc Gv */
6951 modrm = ldub_code(s->pc++);
6952 gen_setcc(s, b);
6953 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6954 break;
6955 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6956 {
6957 int l1;
6958 TCGv t0;
6959
6960 ot = dflag + OT_WORD;
6961 modrm = ldub_code(s->pc++);
6962 reg = ((modrm >> 3) & 7) | rex_r;
6963 mod = (modrm >> 6) & 3;
6964 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6965 if (mod != 3) {
6966 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6967 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6968 } else {
6969 rm = (modrm & 7) | REX_B(s);
6970 gen_op_mov_v_reg(ot, t0, rm);
6971 }
6972#ifdef TARGET_X86_64
6973 if (ot == OT_LONG) {
6974 /* XXX: specific Intel behaviour ? */
6975 l1 = gen_new_label();
6976 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6977 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6978 gen_set_label(l1);
6979 tcg_gen_movi_tl(cpu_tmp0, 0);
6980 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6981 } else
6982#endif
6983 {
6984 l1 = gen_new_label();
6985 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6986 gen_op_mov_reg_v(ot, reg, t0);
6987 gen_set_label(l1);
6988 }
6989 tcg_temp_free(t0);
6990 }
6991 break;
6992
6993 /************************/
6994 /* flags */
6995 case 0x9c: /* pushf */
6996 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6997#ifdef VBOX
6998 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6999#else
7000 if (s->vm86 && s->iopl != 3) {
7001#endif
7002 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7003 } else {
7004 if (s->cc_op != CC_OP_DYNAMIC)
7005 gen_op_set_cc_op(s->cc_op);
7006#ifdef VBOX
7007 if (s->vm86 && s->vme && s->iopl != 3)
7008 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
7009 else
7010#endif
7011 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
7012 gen_push_T0(s);
7013 }
7014 break;
7015 case 0x9d: /* popf */
7016 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
7017#ifdef VBOX
7018 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7019#else
7020 if (s->vm86 && s->iopl != 3) {
7021#endif
7022 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7023 } else {
7024 gen_pop_T0(s);
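            /* note: the writable eflags mask depends on privilege: CPL 0
               may also change IOPL, CPL <= IOPL may change IF, otherwise
               neither is writable; 16-bit operand size clips the mask */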
7025 if (s->cpl == 0) {
7026 if (s->dflag) {
7027 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7028 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7029 } else {
7030 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7031 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7032 }
7033 } else {
7034 if (s->cpl <= s->iopl) {
7035 if (s->dflag) {
7036 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7037 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7038 } else {
7039 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7040 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7041 }
7042 } else {
7043 if (s->dflag) {
7044 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7045 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7046 } else {
7047#ifdef VBOX
7048 if (s->vm86 && s->vme)
7049 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7050 else
7051#endif
7052 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7053 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7054 }
7055 }
7056 }
7057 gen_pop_update(s);
7058 s->cc_op = CC_OP_EFLAGS;
7059 /* abort translation because TF flag may change */
7060 gen_jmp_im(s->pc - s->cs_base);
7061 gen_eob(s);
7062 }
7063 break;
7064 case 0x9e: /* sahf */
7065 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7066 goto illegal_op;
7067 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7068 if (s->cc_op != CC_OP_DYNAMIC)
7069 gen_op_set_cc_op(s->cc_op);
7070 gen_compute_eflags(cpu_cc_src);
7071 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7072 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7073 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7074 s->cc_op = CC_OP_EFLAGS;
7075 break;
7076 case 0x9f: /* lahf */
7077 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7078 goto illegal_op;
7079 if (s->cc_op != CC_OP_DYNAMIC)
7080 gen_op_set_cc_op(s->cc_op);
7081 gen_compute_eflags(cpu_T[0]);
7082 /* Note: gen_compute_eflags() only gives the condition codes */
7083 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7084 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7085 break;
7086 case 0xf5: /* cmc */
7087 if (s->cc_op != CC_OP_DYNAMIC)
7088 gen_op_set_cc_op(s->cc_op);
7089 gen_compute_eflags(cpu_cc_src);
7090 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7091 s->cc_op = CC_OP_EFLAGS;
7092 break;
7093 case 0xf8: /* clc */
7094 if (s->cc_op != CC_OP_DYNAMIC)
7095 gen_op_set_cc_op(s->cc_op);
7096 gen_compute_eflags(cpu_cc_src);
7097 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7098 s->cc_op = CC_OP_EFLAGS;
7099 break;
7100 case 0xf9: /* stc */
7101 if (s->cc_op != CC_OP_DYNAMIC)
7102 gen_op_set_cc_op(s->cc_op);
7103 gen_compute_eflags(cpu_cc_src);
7104 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7105 s->cc_op = CC_OP_EFLAGS;
7106 break;
7107 case 0xfc: /* cld */
7108 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7109 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7110 break;
7111 case 0xfd: /* std */
7112 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7113 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7114 break;
7115
7116 /************************/
7117 /* bit operations */
7118 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7119 ot = dflag + OT_WORD;
7120 modrm = ldub_code(s->pc++);
7121 op = (modrm >> 3) & 7;
7122 mod = (modrm >> 6) & 3;
7123 rm = (modrm & 7) | REX_B(s);
7124 if (mod != 3) {
7125 s->rip_offset = 1;
7126 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7127 gen_op_ld_T0_A0(ot + s->mem_index);
7128 } else {
7129 gen_op_mov_TN_reg(ot, 0, rm);
7130 }
7131 /* load the bit index (used below as a shift count) */
7132 val = ldub_code(s->pc++);
7133 gen_op_movl_T1_im(val);
7134 if (op < 4)
7135 goto illegal_op;
7136 op -= 4;
7137 goto bt_op;
7138 case 0x1a3: /* bt Gv, Ev */
7139 op = 0;
7140 goto do_btx;
7141 case 0x1ab: /* bts */
7142 op = 1;
7143 goto do_btx;
7144 case 0x1b3: /* btr */
7145 op = 2;
7146 goto do_btx;
7147 case 0x1bb: /* btc */
7148 op = 3;
7149 do_btx:
7150 ot = dflag + OT_WORD;
7151 modrm = ldub_code(s->pc++);
7152 reg = ((modrm >> 3) & 7) | rex_r;
7153 mod = (modrm >> 6) & 3;
7154 rm = (modrm & 7) | REX_B(s);
7155 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7156 if (mod != 3) {
7157 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7158 /* specific case: we need to add a displacement */
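                /* the bit offset in T1 may address beyond the operand: the
                   sign-extended offset is divided by the operand width and
                   the resulting element index, scaled back to bytes, is
                   added to A0 */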
7159 gen_exts(ot, cpu_T[1]);
7160 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7161 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7162 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7163 gen_op_ld_T0_A0(ot + s->mem_index);
7164 } else {
7165 gen_op_mov_TN_reg(ot, 0, rm);
7166 }
7167 bt_op:
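        /* T1 is first masked to the bit index within the operand; bt shifts
           the selected bit down into cc_src, while bts/btr/btc apply the
           mask 1 << index with or/and-not/xor and keep the old bit in
           cpu_tmp4 for the flags update below */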
7168 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7169 switch(op) {
7170 case 0:
7171 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7172 tcg_gen_movi_tl(cpu_cc_dst, 0);
7173 break;
7174 case 1:
7175 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7176 tcg_gen_movi_tl(cpu_tmp0, 1);
7177 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7178 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7179 break;
7180 case 2:
7181 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7182 tcg_gen_movi_tl(cpu_tmp0, 1);
7183 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7184 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7185 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7186 break;
7187 default:
7188 case 3:
7189 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7190 tcg_gen_movi_tl(cpu_tmp0, 1);
7191 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7192 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7193 break;
7194 }
7195 s->cc_op = CC_OP_SARB + ot;
7196 if (op != 0) {
7197 if (mod != 3)
7198 gen_op_st_T0_A0(ot + s->mem_index);
7199 else
7200 gen_op_mov_reg_T0(ot, rm);
7201 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7202 tcg_gen_movi_tl(cpu_cc_dst, 0);
7203 }
7204 break;
7205 case 0x1bc: /* bsf */
7206 case 0x1bd: /* bsr */
7207 {
7208 int label1;
7209 TCGv t0;
7210
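            /* note: if the source is zero the helper call is skipped and
               cc_dst stays 0, yielding ZF=1 under CC_OP_LOGICB while the
               destination register is left unmodified */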
7211 ot = dflag + OT_WORD;
7212 modrm = ldub_code(s->pc++);
7213 reg = ((modrm >> 3) & 7) | rex_r;
7214 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7215 gen_extu(ot, cpu_T[0]);
7216 label1 = gen_new_label();
7217 tcg_gen_movi_tl(cpu_cc_dst, 0);
7218 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7219 tcg_gen_mov_tl(t0, cpu_T[0]);
7220 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7221 if (b & 1) {
7222 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7223 } else {
7224 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7225 }
7226 gen_op_mov_reg_T0(ot, reg);
7227 tcg_gen_movi_tl(cpu_cc_dst, 1);
7228 gen_set_label(label1);
7229 tcg_gen_discard_tl(cpu_cc_src);
7230 s->cc_op = CC_OP_LOGICB + ot;
7231 tcg_temp_free(t0);
7232 }
7233 break;
7234 /************************/
7235 /* bcd */
7236 case 0x27: /* daa */
7237 if (CODE64(s))
7238 goto illegal_op;
7239 if (s->cc_op != CC_OP_DYNAMIC)
7240 gen_op_set_cc_op(s->cc_op);
7241 tcg_gen_helper_0_0(helper_daa);
7242 s->cc_op = CC_OP_EFLAGS;
7243 break;
7244 case 0x2f: /* das */
7245 if (CODE64(s))
7246 goto illegal_op;
7247 if (s->cc_op != CC_OP_DYNAMIC)
7248 gen_op_set_cc_op(s->cc_op);
7249 tcg_gen_helper_0_0(helper_das);
7250 s->cc_op = CC_OP_EFLAGS;
7251 break;
7252 case 0x37: /* aaa */
7253 if (CODE64(s))
7254 goto illegal_op;
7255 if (s->cc_op != CC_OP_DYNAMIC)
7256 gen_op_set_cc_op(s->cc_op);
7257 tcg_gen_helper_0_0(helper_aaa);
7258 s->cc_op = CC_OP_EFLAGS;
7259 break;
7260 case 0x3f: /* aas */
7261 if (CODE64(s))
7262 goto illegal_op;
7263 if (s->cc_op != CC_OP_DYNAMIC)
7264 gen_op_set_cc_op(s->cc_op);
7265 tcg_gen_helper_0_0(helper_aas);
7266 s->cc_op = CC_OP_EFLAGS;
7267 break;
7268 case 0xd4: /* aam */
7269 if (CODE64(s))
7270 goto illegal_op;
7271 val = ldub_code(s->pc++);
7272 if (val == 0) {
7273 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7274 } else {
7275 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7276 s->cc_op = CC_OP_LOGICB;
7277 }
7278 break;
7279 case 0xd5: /* aad */
7280 if (CODE64(s))
7281 goto illegal_op;
7282 val = ldub_code(s->pc++);
7283 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7284 s->cc_op = CC_OP_LOGICB;
7285 break;
7286 /************************/
7287 /* misc */
7288 case 0x90: /* nop */
7289 /* XXX: xchg + rex handling */
7290 /* XXX: correct lock test for all insn */
7291 if (prefixes & PREFIX_LOCK)
7292 goto illegal_op;
7293 if (prefixes & PREFIX_REPZ) {
7294 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7295 }
7296 break;
7297 case 0x9b: /* fwait */
7298 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7299 (HF_MP_MASK | HF_TS_MASK)) {
7300 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7301 } else {
7302 if (s->cc_op != CC_OP_DYNAMIC)
7303 gen_op_set_cc_op(s->cc_op);
7304 gen_jmp_im(pc_start - s->cs_base);
7305 tcg_gen_helper_0_0(helper_fwait);
7306 }
7307 break;
7308 case 0xcc: /* int3 */
7309#ifdef VBOX
7310 if (s->vm86 && s->iopl != 3 && !s->vme) {
7311 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7312 } else
7313#endif
7314 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7315 break;
7316 case 0xcd: /* int N */
7317 val = ldub_code(s->pc++);
7318#ifdef VBOX
7319 if (s->vm86 && s->iopl != 3 && !s->vme) {
7320#else
7321 if (s->vm86 && s->iopl != 3) {
7322#endif
7323 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7324 } else {
7325 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7326 }
7327 break;
7328 case 0xce: /* into */
7329 if (CODE64(s))
7330 goto illegal_op;
7331 if (s->cc_op != CC_OP_DYNAMIC)
7332 gen_op_set_cc_op(s->cc_op);
7333 gen_jmp_im(pc_start - s->cs_base);
7334 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7335 break;
7336 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7337 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7338#if 1
7339 gen_debug(s, pc_start - s->cs_base);
7340#else
7341 /* start debug */
7342 tb_flush(cpu_single_env);
7343 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7344#endif
7345 break;
7346 case 0xfa: /* cli */
7347 if (!s->vm86) {
7348 if (s->cpl <= s->iopl) {
7349 tcg_gen_helper_0_0(helper_cli);
7350 } else {
7351 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7352 }
7353 } else {
7354 if (s->iopl == 3) {
7355 tcg_gen_helper_0_0(helper_cli);
7356#ifdef VBOX
7357 } else if (s->iopl != 3 && s->vme) {
7358 tcg_gen_helper_0_0(helper_cli_vme);
7359#endif
7360 } else {
7361 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7362 }
7363 }
7364 break;
7365 case 0xfb: /* sti */
7366 if (!s->vm86) {
7367 if (s->cpl <= s->iopl) {
7368 gen_sti:
7369 tcg_gen_helper_0_0(helper_sti);
7370 /* interrupts are enabled only after the insn following sti */
7371 /* if several insns in a row inhibit interrupts, only the
7372 _first_ one sets the inhibit flag */
7373 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7374 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7375 /* give a chance to handle pending irqs */
7376 gen_jmp_im(s->pc - s->cs_base);
7377 gen_eob(s);
7378 } else {
7379 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7380 }
7381 } else {
7382 if (s->iopl == 3) {
7383 goto gen_sti;
7384#ifdef VBOX
7385 } else if (s->iopl != 3 && s->vme) {
7386 tcg_gen_helper_0_0(helper_sti_vme);
7387 /* give a chance to handle pending irqs */
7388 gen_jmp_im(s->pc - s->cs_base);
7389 gen_eob(s);
7390#endif
7391 } else {
7392 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7393 }
7394 }
7395 break;
7396 case 0x62: /* bound */
7397 if (CODE64(s))
7398 goto illegal_op;
7399 ot = dflag ? OT_LONG : OT_WORD;
7400 modrm = ldub_code(s->pc++);
7401 reg = (modrm >> 3) & 7;
7402 mod = (modrm >> 6) & 3;
7403 if (mod == 3)
7404 goto illegal_op;
7405 gen_op_mov_TN_reg(ot, 0, reg);
7406 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7407 gen_jmp_im(pc_start - s->cs_base);
7408 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7409 if (ot == OT_WORD)
7410 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7411 else
7412 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7413 break;
7414 case 0x1c8 ... 0x1cf: /* bswap reg */
7415 reg = (b & 7) | REX_B(s);
7416#ifdef TARGET_X86_64
7417 if (dflag == 2) {
7418 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7419 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7420 gen_op_mov_reg_T0(OT_QUAD, reg);
7421 } else
7422 {
7423 TCGv tmp0;
7424 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7425
7426 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7427 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7428 tcg_gen_bswap_i32(tmp0, tmp0);
7429 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7430 gen_op_mov_reg_T0(OT_LONG, reg);
7431 }
7432#else
7433 {
7434 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7435 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7436 gen_op_mov_reg_T0(OT_LONG, reg);
7437 }
7438#endif
7439 break;
7440 case 0xd6: /* salc */
7441 if (CODE64(s))
7442 goto illegal_op;
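        /* salc (undocumented): AL = CF ? 0xff : 0x00, computed as -carry */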
7443 if (s->cc_op != CC_OP_DYNAMIC)
7444 gen_op_set_cc_op(s->cc_op);
7445 gen_compute_eflags_c(cpu_T[0]);
7446 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7447 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7448 break;
7449 case 0xe0: /* loopnz */
7450 case 0xe1: /* loopz */
7451 case 0xe2: /* loop */
7452 case 0xe3: /* jecxz */
7453 {
7454 int l1, l2, l3;
7455
7456 tval = (int8_t)insn_get(s, OT_BYTE);
7457 next_eip = s->pc - s->cs_base;
7458 tval += next_eip;
7459 if (s->dflag == 0)
7460 tval &= 0xffff;
7461
7462 l1 = gen_new_label();
7463 l2 = gen_new_label();
7464 l3 = gen_new_label();
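                /* l1 = branch-taken target, l2 = fall-through join point,
                   l3 = early exit for loopz/loopnz once ECX reaches zero */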
7465 b &= 3;
7466 switch(b) {
7467 case 0: /* loopnz */
7468 case 1: /* loopz */
7469 if (s->cc_op != CC_OP_DYNAMIC)
7470 gen_op_set_cc_op(s->cc_op);
7471 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7472 gen_op_jz_ecx(s->aflag, l3);
7473 gen_compute_eflags(cpu_tmp0);
7474 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7475 if (b == 0) {
7476 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7477 } else {
7478 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7479 }
7480 break;
7481 case 2: /* loop */
7482 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7483 gen_op_jnz_ecx(s->aflag, l1);
7484 break;
7485 default:
7486 case 3: /* jcxz */
7487 gen_op_jz_ecx(s->aflag, l1);
7488 break;
7489 }
7490
7491 gen_set_label(l3);
7492 gen_jmp_im(next_eip);
7493 tcg_gen_br(l2);
7494
7495 gen_set_label(l1);
7496 gen_jmp_im(tval);
7497 gen_set_label(l2);
7498 gen_eob(s);
7499 }
7500 break;
7501 case 0x130: /* wrmsr */
7502 case 0x132: /* rdmsr */
7503 if (s->cpl != 0) {
7504 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7505 } else {
7506 if (s->cc_op != CC_OP_DYNAMIC)
7507 gen_op_set_cc_op(s->cc_op);
7508 gen_jmp_im(pc_start - s->cs_base);
7509 if (b & 2) {
7510 tcg_gen_helper_0_0(helper_rdmsr);
7511 } else {
7512 tcg_gen_helper_0_0(helper_wrmsr);
7513 }
7514 }
7515 break;
7516 case 0x131: /* rdtsc */
7517 if (s->cc_op != CC_OP_DYNAMIC)
7518 gen_op_set_cc_op(s->cc_op);
7519 gen_jmp_im(pc_start - s->cs_base);
7520 if (use_icount)
7521 gen_io_start();
7522 tcg_gen_helper_0_0(helper_rdtsc);
7523 if (use_icount) {
7524 gen_io_end();
7525 gen_jmp(s, s->pc - s->cs_base);
7526 }
7527 break;
7528 case 0x133: /* rdpmc */
7529 if (s->cc_op != CC_OP_DYNAMIC)
7530 gen_op_set_cc_op(s->cc_op);
7531 gen_jmp_im(pc_start - s->cs_base);
7532 tcg_gen_helper_0_0(helper_rdpmc);
7533 break;
7534 case 0x134: /* sysenter */
7535#ifndef VBOX
7536 /* for Intel, SYSENTER is valid in 64-bit mode */
7537 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7538#else
7539 /** @todo make things right: add the Intel vendor check used in the non-VBOX branch */
7540 if (CODE64(s))
7541#endif
7542 goto illegal_op;
7543 if (!s->pe) {
7544 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7545 } else {
7546 if (s->cc_op != CC_OP_DYNAMIC) {
7547 gen_op_set_cc_op(s->cc_op);
7548 s->cc_op = CC_OP_DYNAMIC;
7549 }
7550 gen_jmp_im(pc_start - s->cs_base);
7551 tcg_gen_helper_0_0(helper_sysenter);
7552 gen_eob(s);
7553 }
7554 break;
7555 case 0x135: /* sysexit */
7556#ifndef VBOX
7557 /* for Intel, SYSEXIT is valid in 64-bit mode */
7558 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7559#else
7560 /** @todo make things right: add the Intel vendor check used in the non-VBOX branch */
7561 if (CODE64(s))
7562#endif
7563 goto illegal_op;
7564 if (!s->pe) {
7565 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7566 } else {
7567 if (s->cc_op != CC_OP_DYNAMIC) {
7568 gen_op_set_cc_op(s->cc_op);
7569 s->cc_op = CC_OP_DYNAMIC;
7570 }
7571 gen_jmp_im(pc_start - s->cs_base);
7572 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7573 gen_eob(s);
7574 }
7575 break;
7576#ifdef TARGET_X86_64
7577 case 0x105: /* syscall */
7578 /* XXX: is it usable in real mode ? */
7579 if (s->cc_op != CC_OP_DYNAMIC) {
7580 gen_op_set_cc_op(s->cc_op);
7581 s->cc_op = CC_OP_DYNAMIC;
7582 }
7583 gen_jmp_im(pc_start - s->cs_base);
7584 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7585 gen_eob(s);
7586 break;
7587 case 0x107: /* sysret */
7588 if (!s->pe) {
7589 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7590 } else {
7591 if (s->cc_op != CC_OP_DYNAMIC) {
7592 gen_op_set_cc_op(s->cc_op);
7593 s->cc_op = CC_OP_DYNAMIC;
7594 }
7595 gen_jmp_im(pc_start - s->cs_base);
7596 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7597 /* condition codes are modified only in long mode */
7598 if (s->lma)
7599 s->cc_op = CC_OP_EFLAGS;
7600 gen_eob(s);
7601 }
7602 break;
7603#endif
7604 case 0x1a2: /* cpuid */
7605 if (s->cc_op != CC_OP_DYNAMIC)
7606 gen_op_set_cc_op(s->cc_op);
7607 gen_jmp_im(pc_start - s->cs_base);
7608 tcg_gen_helper_0_0(helper_cpuid);
7609 break;
7610 case 0xf4: /* hlt */
7611 if (s->cpl != 0) {
7612 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7613 } else {
7614 if (s->cc_op != CC_OP_DYNAMIC)
7615 gen_op_set_cc_op(s->cc_op);
7616 gen_jmp_im(pc_start - s->cs_base);
7617 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7618 s->is_jmp = 3;
7619 }
7620 break;
7621 case 0x100:
7622 modrm = ldub_code(s->pc++);
7623 mod = (modrm >> 6) & 3;
7624 op = (modrm >> 3) & 7;
7625 switch(op) {
7626 case 0: /* sldt */
7627 if (!s->pe || s->vm86)
7628 goto illegal_op;
7629 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7630 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7631 ot = OT_WORD;
7632 if (mod == 3)
7633 ot += s->dflag;
7634 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7635 break;
7636 case 2: /* lldt */
7637 if (!s->pe || s->vm86)
7638 goto illegal_op;
7639 if (s->cpl != 0) {
7640 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7641 } else {
7642 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7643 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7644 gen_jmp_im(pc_start - s->cs_base);
7645 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7646 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7647 }
7648 break;
7649 case 1: /* str */
7650 if (!s->pe || s->vm86)
7651 goto illegal_op;
7652 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7653 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7654 ot = OT_WORD;
7655 if (mod == 3)
7656 ot += s->dflag;
7657 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7658 break;
7659 case 3: /* ltr */
7660 if (!s->pe || s->vm86)
7661 goto illegal_op;
7662 if (s->cpl != 0) {
7663 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7664 } else {
7665 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7666 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7667 gen_jmp_im(pc_start - s->cs_base);
7668 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7669 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7670 }
7671 break;
7672 case 4: /* verr */
7673 case 5: /* verw */
7674 if (!s->pe || s->vm86)
7675 goto illegal_op;
7676 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7677 if (s->cc_op != CC_OP_DYNAMIC)
7678 gen_op_set_cc_op(s->cc_op);
7679 if (op == 4)
7680 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7681 else
7682 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7683 s->cc_op = CC_OP_EFLAGS;
7684 break;
7685 default:
7686 goto illegal_op;
7687 }
7688 break;
7689 case 0x101:
7690 modrm = ldub_code(s->pc++);
7691 mod = (modrm >> 6) & 3;
7692 op = (modrm >> 3) & 7;
7693 rm = modrm & 7;
7694
7695#ifdef VBOX
7696 /* 0f 01 f9 */
7697 if (modrm == 0xf9)
7698 {
7699 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7700 goto illegal_op;
7701 gen_jmp_im(pc_start - s->cs_base);
7702 tcg_gen_helper_0_0(helper_rdtscp);
7703 break;
7704 }
7705#endif
7706 switch(op) {
7707 case 0: /* sgdt */
7708 if (mod == 3)
7709 goto illegal_op;
7710 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7711 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7712 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7713 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7714 gen_add_A0_im(s, 2);
7715 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7716 if (!s->dflag)
7717 gen_op_andl_T0_im(0xffffff);
7718 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7719 break;
7720 case 1:
7721 if (mod == 3) {
7722 switch (rm) {
7723 case 0: /* monitor */
7724 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7725 s->cpl != 0)
7726 goto illegal_op;
7727 if (s->cc_op != CC_OP_DYNAMIC)
7728 gen_op_set_cc_op(s->cc_op);
7729 gen_jmp_im(pc_start - s->cs_base);
7730#ifdef TARGET_X86_64
7731 if (s->aflag == 2) {
7732 gen_op_movq_A0_reg(R_EAX);
7733 } else
7734#endif
7735 {
7736 gen_op_movl_A0_reg(R_EAX);
7737 if (s->aflag == 0)
7738 gen_op_andl_A0_ffff();
7739 }
7740 gen_add_A0_ds_seg(s);
7741 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7742 break;
7743 case 1: /* mwait */
7744 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7745 s->cpl != 0)
7746 goto illegal_op;
7747 if (s->cc_op != CC_OP_DYNAMIC) {
7748 gen_op_set_cc_op(s->cc_op);
7749 s->cc_op = CC_OP_DYNAMIC;
7750 }
7751 gen_jmp_im(pc_start - s->cs_base);
7752 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7753 gen_eob(s);
7754 break;
7755 default:
7756 goto illegal_op;
7757 }
7758 } else { /* sidt */
7759 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7760 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7761 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7762 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7763 gen_add_A0_im(s, 2);
7764 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7765 if (!s->dflag)
7766 gen_op_andl_T0_im(0xffffff);
7767 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7768 }
7769 break;
7770 case 2: /* lgdt */
7771 case 3: /* lidt */
7772 if (mod == 3) {
7773 if (s->cc_op != CC_OP_DYNAMIC)
7774 gen_op_set_cc_op(s->cc_op);
7775 gen_jmp_im(pc_start - s->cs_base);
7776 switch(rm) {
7777 case 0: /* VMRUN */
7778 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7779 goto illegal_op;
7780 if (s->cpl != 0) {
7781 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7782 break;
7783 } else {
7784 tcg_gen_helper_0_2(helper_vmrun,
7785 tcg_const_i32(s->aflag),
7786 tcg_const_i32(s->pc - pc_start));
7787 tcg_gen_exit_tb(0);
7788 s->is_jmp = 3;
7789 }
7790 break;
7791 case 1: /* VMMCALL */
7792 if (!(s->flags & HF_SVME_MASK))
7793 goto illegal_op;
7794 tcg_gen_helper_0_0(helper_vmmcall);
7795 break;
7796 case 2: /* VMLOAD */
7797 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7798 goto illegal_op;
7799 if (s->cpl != 0) {
7800 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7801 break;
7802 } else {
7803 tcg_gen_helper_0_1(helper_vmload,
7804 tcg_const_i32(s->aflag));
7805 }
7806 break;
7807 case 3: /* VMSAVE */
7808 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7809 goto illegal_op;
7810 if (s->cpl != 0) {
7811 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7812 break;
7813 } else {
7814 tcg_gen_helper_0_1(helper_vmsave,
7815 tcg_const_i32(s->aflag));
7816 }
7817 break;
7818 case 4: /* STGI */
7819 if ((!(s->flags & HF_SVME_MASK) &&
7820 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7821 !s->pe)
7822 goto illegal_op;
7823 if (s->cpl != 0) {
7824 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7825 break;
7826 } else {
7827 tcg_gen_helper_0_0(helper_stgi);
7828 }
7829 break;
7830 case 5: /* CLGI */
7831 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7832 goto illegal_op;
7833 if (s->cpl != 0) {
7834 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7835 break;
7836 } else {
7837 tcg_gen_helper_0_0(helper_clgi);
7838 }
7839 break;
7840 case 6: /* SKINIT */
7841 if ((!(s->flags & HF_SVME_MASK) &&
7842 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7843 !s->pe)
7844 goto illegal_op;
7845 tcg_gen_helper_0_0(helper_skinit);
7846 break;
7847 case 7: /* INVLPGA */
7848 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7849 goto illegal_op;
7850 if (s->cpl != 0) {
7851 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7852 break;
7853 } else {
7854 tcg_gen_helper_0_1(helper_invlpga,
7855 tcg_const_i32(s->aflag));
7856 }
7857 break;
7858 default:
7859 goto illegal_op;
7860 }
7861 } else if (s->cpl != 0) {
7862 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7863 } else {
7864 gen_svm_check_intercept(s, pc_start,
7865 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7866 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7867 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7868 gen_add_A0_im(s, 2);
7869 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7870 if (!s->dflag)
7871 gen_op_andl_T0_im(0xffffff);
7872 if (op == 2) {
7873 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7874 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7875 } else {
7876 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7877 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7878 }
7879 }
7880 break;
7881 case 4: /* smsw */
7882 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7883 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7884 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7885 break;
7886 case 6: /* lmsw */
7887 if (s->cpl != 0) {
7888 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7889 } else {
7890 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7891 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7892 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
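                /* lmsw can change CR0 (e.g. the PE bit), so the TB is
                   ended to force the static translation flags to be
                   re-read */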
7893 gen_jmp_im(s->pc - s->cs_base);
7894 gen_eob(s);
7895 }
7896 break;
7897 case 7: /* invlpg */
7898 if (s->cpl != 0) {
7899 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7900 } else {
7901 if (mod == 3) {
7902#ifdef TARGET_X86_64
7903 if (CODE64(s) && rm == 0) {
7904 /* swapgs */
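                    /* exchanges GS.base with the kernelgsbase MSR value;
                       the CPL 0 check was already done above */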
7905 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7906 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7907 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7908 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7909 } else
7910#endif
7911 {
7912 goto illegal_op;
7913 }
7914 } else {
7915 if (s->cc_op != CC_OP_DYNAMIC)
7916 gen_op_set_cc_op(s->cc_op);
7917 gen_jmp_im(pc_start - s->cs_base);
7918 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7919 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7920 gen_jmp_im(s->pc - s->cs_base);
7921 gen_eob(s);
7922 }
7923 }
7924 break;
7925 default:
7926 goto illegal_op;
7927 }
7928 break;
7929 case 0x108: /* invd */
7930 case 0x109: /* wbinvd */
7931 if (s->cpl != 0) {
7932 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7933 } else {
7934 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7935 /* nothing to do */
7936 }
7937 break;
7938 case 0x63: /* arpl or movslS (x86_64) */
7939#ifdef TARGET_X86_64
7940 if (CODE64(s)) {
7941 int d_ot;
7942 /* d_ot is the size of the destination */
7943 d_ot = dflag + OT_WORD;
7944
7945 modrm = ldub_code(s->pc++);
7946 reg = ((modrm >> 3) & 7) | rex_r;
7947 mod = (modrm >> 6) & 3;
7948 rm = (modrm & 7) | REX_B(s);
7949
7950 if (mod == 3) {
7951 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7952 /* sign extend */
7953 if (d_ot == OT_QUAD)
7954 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7955 gen_op_mov_reg_T0(d_ot, reg);
7956 } else {
7957 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7958 if (d_ot == OT_QUAD) {
7959 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7960 } else {
7961 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7962 }
7963 gen_op_mov_reg_T0(d_ot, reg);
7964 }
7965 } else
7966#endif
7967 {
7968 int label1;
7969 TCGv t0, t1, t2, a0;
7970
7971 if (!s->pe || s->vm86)
7972 goto illegal_op;
7973
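                /* arpl: if the destination selector's RPL (low two bits) is
                   lower than the source's, raise it to the source RPL and
                   set ZF (t2 = CC_Z); otherwise clear ZF */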
7974 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7975 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7976 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7977#ifdef VBOX
7978 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7979#endif
7980 ot = OT_WORD;
7981 modrm = ldub_code(s->pc++);
7982 reg = (modrm >> 3) & 7;
7983 mod = (modrm >> 6) & 3;
7984 rm = modrm & 7;
7985 if (mod != 3) {
7986 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7987#ifdef VBOX
7988 tcg_gen_mov_tl(a0, cpu_A0);
7989#endif
7990 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7991 } else {
7992 gen_op_mov_v_reg(ot, t0, rm);
7993 }
7994 gen_op_mov_v_reg(ot, t1, reg);
7995 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7996 tcg_gen_andi_tl(t1, t1, 3);
7997 tcg_gen_movi_tl(t2, 0);
7998 label1 = gen_new_label();
7999 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
8000 tcg_gen_andi_tl(t0, t0, ~3);
8001 tcg_gen_or_tl(t0, t0, t1);
8002 tcg_gen_movi_tl(t2, CC_Z);
8003 gen_set_label(label1);
8004 if (mod != 3) {
8005#ifdef VBOX
8006 /* cpu_A0 doesn't survive branch */
8007 gen_op_st_v(ot + s->mem_index, t0, a0);
8008#else
8009 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
8010#endif
8011 } else {
8012 gen_op_mov_reg_v(ot, rm, t0);
8013 }
8014 if (s->cc_op != CC_OP_DYNAMIC)
8015 gen_op_set_cc_op(s->cc_op);
8016 gen_compute_eflags(cpu_cc_src);
8017 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
8018 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
8019 s->cc_op = CC_OP_EFLAGS;
8020 tcg_temp_free(t0);
8021 tcg_temp_free(t1);
8022 tcg_temp_free(t2);
8023#ifdef VBOX
8024 tcg_temp_free(a0);
8025#endif
8026 }
8027 break;
8028 case 0x102: /* lar */
8029 case 0x103: /* lsl */
8030 {
8031 int label1;
8032 TCGv t0;
8033 if (!s->pe || s->vm86)
8034 goto illegal_op;
8035 ot = dflag ? OT_LONG : OT_WORD;
8036 modrm = ldub_code(s->pc++);
8037 reg = ((modrm >> 3) & 7) | rex_r;
8038 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8039 t0 = tcg_temp_local_new(TCG_TYPE_TL);
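                /* the lar/lsl helpers set ZF in cc_src on success; the
                   destination register is written only when ZF is set, so
                   it stays unchanged for an invalid selector */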
8040 if (s->cc_op != CC_OP_DYNAMIC)
8041 gen_op_set_cc_op(s->cc_op);
8042 if (b == 0x102)
8043 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8044 else
8045 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8046 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8047 label1 = gen_new_label();
8048 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8049 gen_op_mov_reg_v(ot, reg, t0);
8050 gen_set_label(label1);
8051 s->cc_op = CC_OP_EFLAGS;
8052 tcg_temp_free(t0);
8053 }
8054 break;
8055 case 0x118:
8056 modrm = ldub_code(s->pc++);
8057 mod = (modrm >> 6) & 3;
8058 op = (modrm >> 3) & 7;
8059 switch(op) {
8060 case 0: /* prefetchnta */
8061        case 1: /* prefetcht0 */
8062        case 2: /* prefetcht1 */
8063        case 3: /* prefetcht2 */
8064 if (mod == 3)
8065 goto illegal_op;
8066 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8067 /* nothing more to do */
8068 break;
8069 default: /* nop (multi byte) */
8070 gen_nop_modrm(s, modrm);
8071 break;
8072 }
8073 break;
8074 case 0x119 ... 0x11f: /* nop (multi byte) */
8075 modrm = ldub_code(s->pc++);
8076 gen_nop_modrm(s, modrm);
8077 break;
8078 case 0x120: /* mov reg, crN */
8079 case 0x122: /* mov crN, reg */
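        /* Control-register moves are privileged (CPL 0).  Bit 1 of the
         * opcode (b & 2) selects the direction: set means a write to crN.
         * A CR write can change paging or the CPU mode, so the translation
         * block is ended with gen_eob() after helper_write_crN. */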
8080 if (s->cpl != 0) {
8081 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8082 } else {
8083 modrm = ldub_code(s->pc++);
8084#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8085 if ((modrm & 0xc0) != 0xc0)
8086 goto illegal_op;
8087#endif
8088 rm = (modrm & 7) | REX_B(s);
8089 reg = ((modrm >> 3) & 7) | rex_r;
8090 if (CODE64(s))
8091 ot = OT_QUAD;
8092 else
8093 ot = OT_LONG;
8094 switch(reg) {
8095 case 0:
8096 case 2:
8097 case 3:
8098 case 4:
8099 case 8:
8100 if (s->cc_op != CC_OP_DYNAMIC)
8101 gen_op_set_cc_op(s->cc_op);
8102 gen_jmp_im(pc_start - s->cs_base);
8103 if (b & 2) {
8104 gen_op_mov_TN_reg(ot, 0, rm);
8105 tcg_gen_helper_0_2(helper_write_crN,
8106 tcg_const_i32(reg), cpu_T[0]);
8107 gen_jmp_im(s->pc - s->cs_base);
8108 gen_eob(s);
8109 } else {
8110 tcg_gen_helper_1_1(helper_read_crN,
8111 cpu_T[0], tcg_const_i32(reg));
8112 gen_op_mov_reg_T0(ot, rm);
8113 }
8114 break;
8115 default:
8116 goto illegal_op;
8117 }
8118 }
8119 break;
8120 case 0x121: /* mov reg, drN */
8121 case 0x123: /* mov drN, reg */
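        /* Debug-register moves mirror the CR moves above: CPL 0 only,
         * with b & 2 selecting the write direction.  DR4, DR5 and DR8+
         * are rejected rather than aliased to DR6/DR7 (see the CR4.DE
         * note below). */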
8122 if (s->cpl != 0) {
8123 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8124 } else {
8125 modrm = ldub_code(s->pc++);
8126#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8127 if ((modrm & 0xc0) != 0xc0)
8128 goto illegal_op;
8129#endif
8130 rm = (modrm & 7) | REX_B(s);
8131 reg = ((modrm >> 3) & 7) | rex_r;
8132 if (CODE64(s))
8133 ot = OT_QUAD;
8134 else
8135 ot = OT_LONG;
8136 /* XXX: do it dynamically with CR4.DE bit */
8137 if (reg == 4 || reg == 5 || reg >= 8)
8138 goto illegal_op;
8139 if (b & 2) {
8140 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8141 gen_op_mov_TN_reg(ot, 0, rm);
8142 tcg_gen_helper_0_2(helper_movl_drN_T0,
8143 tcg_const_i32(reg), cpu_T[0]);
8144 gen_jmp_im(s->pc - s->cs_base);
8145 gen_eob(s);
8146 } else {
8147 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8148 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8149 gen_op_mov_reg_T0(ot, rm);
8150 }
8151 }
8152 break;
8153 case 0x106: /* clts */
8154 if (s->cpl != 0) {
8155 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8156 } else {
8157 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8158 tcg_gen_helper_0_0(helper_clts);
8159 /* abort block because static cpu state changed */
8160 gen_jmp_im(s->pc - s->cs_base);
8161 gen_eob(s);
8162 }
8163 break;
8164 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8165 case 0x1c3: /* MOVNTI reg, mem */
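        /* MOVNTI is a non-temporal store; the cache hint has no meaning
         * under emulation, so a plain store of the register is generated. */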
8166 if (!(s->cpuid_features & CPUID_SSE2))
8167 goto illegal_op;
8168 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8169 modrm = ldub_code(s->pc++);
8170 mod = (modrm >> 6) & 3;
8171 if (mod == 3)
8172 goto illegal_op;
8173 reg = ((modrm >> 3) & 7) | rex_r;
8174 /* generate a generic store */
8175 gen_ldst_modrm(s, modrm, ot, reg, 1);
8176 break;
8177 case 0x1ae:
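        /* 0F AE group: the reg field of the ModRM byte selects between
         * fxsave, fxrstor, ldmxcsr, stmxcsr, lfence, mfence and
         * sfence/clflush. */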
8178 modrm = ldub_code(s->pc++);
8179 mod = (modrm >> 6) & 3;
8180 op = (modrm >> 3) & 7;
8181 switch(op) {
8182 case 0: /* fxsave */
8183 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8184 (s->flags & HF_EM_MASK))
8185 goto illegal_op;
8186 if (s->flags & HF_TS_MASK) {
8187 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8188 break;
8189 }
8190 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8191 if (s->cc_op != CC_OP_DYNAMIC)
8192 gen_op_set_cc_op(s->cc_op);
8193 gen_jmp_im(pc_start - s->cs_base);
8194 tcg_gen_helper_0_2(helper_fxsave,
8195 cpu_A0, tcg_const_i32((s->dflag == 2)));
8196 break;
8197 case 1: /* fxrstor */
8198 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8199 (s->flags & HF_EM_MASK))
8200 goto illegal_op;
8201 if (s->flags & HF_TS_MASK) {
8202 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8203 break;
8204 }
8205 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8206 if (s->cc_op != CC_OP_DYNAMIC)
8207 gen_op_set_cc_op(s->cc_op);
8208 gen_jmp_im(pc_start - s->cs_base);
8209 tcg_gen_helper_0_2(helper_fxrstor,
8210 cpu_A0, tcg_const_i32((s->dflag == 2)));
8211 break;
8212 case 2: /* ldmxcsr */
8213 case 3: /* stmxcsr */
8214 if (s->flags & HF_TS_MASK) {
8215 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8216 break;
8217 }
8218 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8219 mod == 3)
8220 goto illegal_op;
8221 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8222 if (op == 2) {
8223 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8224 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8225 } else {
8226 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8227 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8228 }
8229 break;
8230 case 5: /* lfence */
8231 case 6: /* mfence */
8232 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8233 goto illegal_op;
8234 break;
8235 case 7: /* sfence / clflush */
8236 if ((modrm & 0xc7) == 0xc0) {
8237 /* sfence */
8238 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8239 if (!(s->cpuid_features & CPUID_SSE))
8240 goto illegal_op;
8241 } else {
8242 /* clflush */
8243 if (!(s->cpuid_features & CPUID_CLFLUSH))
8244 goto illegal_op;
8245 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8246 }
8247 break;
8248 default:
8249 goto illegal_op;
8250 }
8251 break;
8252 case 0x10d: /* 3DNow! prefetch(w) */
8253 modrm = ldub_code(s->pc++);
8254 mod = (modrm >> 6) & 3;
8255 if (mod == 3)
8256 goto illegal_op;
8257 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8258 /* ignore for now */
8259 break;
8260 case 0x1aa: /* rsm */
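        /* RSM is only valid while in System Management Mode; elsewhere
         * it raises #UD, hence the HF_SMM_MASK check. */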
8261 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8262 if (!(s->flags & HF_SMM_MASK))
8263 goto illegal_op;
8264 if (s->cc_op != CC_OP_DYNAMIC) {
8265 gen_op_set_cc_op(s->cc_op);
8266 s->cc_op = CC_OP_DYNAMIC;
8267 }
8268 gen_jmp_im(s->pc - s->cs_base);
8269 tcg_gen_helper_0_0(helper_rsm);
8270 gen_eob(s);
8271 break;
8272 case 0x1b8: /* SSE4.2 popcnt */
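        /* POPCNT is encoded as F3 0F B8: the mandatory F3 (REPZ) prefix
         * must be present, and LOCK or REPNZ must not be. */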
8273 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8274 PREFIX_REPZ)
8275 goto illegal_op;
8276 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8277 goto illegal_op;
8278
8279 modrm = ldub_code(s->pc++);
8280 reg = ((modrm >> 3) & 7);
8281
8282 if (s->prefix & PREFIX_DATA)
8283 ot = OT_WORD;
8284 else if (s->dflag != 2)
8285 ot = OT_LONG;
8286 else
8287 ot = OT_QUAD;
8288
8289 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8290 tcg_gen_helper_1_2(helper_popcnt,
8291 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8292 gen_op_mov_reg_T0(ot, reg);
8293
8294 s->cc_op = CC_OP_EFLAGS;
8295 break;
8296 case 0x10e ... 0x10f:
8297 /* 3DNow! instructions, ignore prefixes */
8298 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
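        /* fall through to the common MMX/SSE dispatch */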
8299 case 0x110 ... 0x117:
8300 case 0x128 ... 0x12f:
8301 case 0x138 ... 0x13a:
8302 case 0x150 ... 0x177:
8303 case 0x17c ... 0x17f:
8304 case 0x1c2:
8305 case 0x1c4 ... 0x1c6:
8306 case 0x1d0 ... 0x1fe:
8307 gen_sse(s, b, pc_start, rex_r);
8308 break;
8309 default:
8310 goto illegal_op;
8311 }
8312 /* lock generation */
8313 if (s->prefix & PREFIX_LOCK)
8314 tcg_gen_helper_0_0(helper_unlock);
8315 return s->pc;
8316 illegal_op:
8317 if (s->prefix & PREFIX_LOCK)
8318 tcg_gen_helper_0_0(helper_unlock);
8319 /* XXX: ensure that no lock was generated */
8320 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8321 return s->pc;
8322}
8323
8324void optimize_flags_init(void)
8325{
8326#if TCG_TARGET_REG_BITS == 32
8327 assert(sizeof(CCTable) == (1 << 3));
8328#else
8329 assert(sizeof(CCTable) == (1 << 4));
8330#endif
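    /* Map the lazy condition-code fields of CPUState (cc_op, cc_src,
     * cc_dst, cc_tmp) onto TCG globals so generated code can access
     * them directly; cpu_env is pinned to the AREG0 host register. */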
8331 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8332 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8333 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8334 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8335 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8336 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8337 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8338 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8339 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8340
8341 /* register helpers */
8342
8343#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8344#include "helper.h"
8345}
8346
8347/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8348 basic block 'tb'. If search_pc is TRUE, also generate PC
8349 information for each intermediate instruction. */
8350#ifndef VBOX
8351static inline void gen_intermediate_code_internal(CPUState *env,
8352#else /* VBOX */
8353DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8354#endif /* VBOX */
8355 TranslationBlock *tb,
8356 int search_pc)
8357{
8358 DisasContext dc1, *dc = &dc1;
8359 target_ulong pc_ptr;
8360 uint16_t *gen_opc_end;
8361 int j, lj, cflags;
8362 uint64_t flags;
8363 target_ulong pc_start;
8364 target_ulong cs_base;
8365 int num_insns;
8366 int max_insns;
8367
8368 /* generate intermediate code */
8369 pc_start = tb->pc;
8370 cs_base = tb->cs_base;
8371 flags = tb->flags;
8372 cflags = tb->cflags;
8373
8374 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8375 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8376 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8377 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8378 dc->f_st = 0;
8379 dc->vm86 = (flags >> VM_SHIFT) & 1;
8380#ifdef VBOX
8381 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8382 dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
8383#ifdef VBOX_WITH_CALL_RECORD
8384 if ( !(env->state & CPU_RAW_RING0)
8385 && (env->cr[0] & CR0_PG_MASK)
8386 && !(env->eflags & X86_EFL_IF)
8387 && dc->code32)
8388 dc->record_call = 1;
8389 else
8390 dc->record_call = 0;
8391#endif
8392#endif
8393 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8394 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8395 dc->tf = (flags >> TF_SHIFT) & 1;
8396 dc->singlestep_enabled = env->singlestep_enabled;
8397 dc->cc_op = CC_OP_DYNAMIC;
8398 dc->cs_base = cs_base;
8399 dc->tb = tb;
8400 dc->popl_esp_hack = 0;
8401 /* select memory access functions */
8402 dc->mem_index = 0;
8403 if (flags & HF_SOFTMMU_MASK) {
8404 if (dc->cpl == 3)
8405 dc->mem_index = 2 * 4;
8406 else
8407 dc->mem_index = 1 * 4;
8408 }
8409 dc->cpuid_features = env->cpuid_features;
8410 dc->cpuid_ext_features = env->cpuid_ext_features;
8411 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8412 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8413#ifdef TARGET_X86_64
8414 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8415 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8416#endif
8417 dc->flags = flags;
8418 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8419 (flags & HF_INHIBIT_IRQ_MASK)
8420#ifndef CONFIG_SOFTMMU
8421 || (flags & HF_SOFTMMU_MASK)
8422#endif
8423 );
8424#if 0
8425 /* check addseg logic */
8426 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8427 printf("ERROR addseg\n");
8428#endif
8429
8430 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8431 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8432 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8433 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8434
8435 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8436 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8437 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8438 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8439 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8440 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8441 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8442 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8443 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8444
8445 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8446
8447 dc->is_jmp = DISAS_NEXT;
8448 pc_ptr = pc_start;
8449 lj = -1;
8450 num_insns = 0;
8451 max_insns = tb->cflags & CF_COUNT_MASK;
8452 if (max_insns == 0)
8453 max_insns = CF_COUNT_MASK;
8454
8455 gen_icount_start();
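    /* Main translation loop: translate one guest instruction per
     * iteration until a jump ends the block, single-step or an
     * inhibit-IRQ window forces a stop, or the TB hits its size or
     * instruction-count limits. */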
8456 for(;;) {
8457 if (env->nb_breakpoints > 0) {
8458 for(j = 0; j < env->nb_breakpoints; j++) {
8459 if (env->breakpoints[j] == pc_ptr) {
8460 gen_debug(dc, pc_ptr - dc->cs_base);
8461 break;
8462 }
8463 }
8464 }
8465 if (search_pc) {
8466 j = gen_opc_ptr - gen_opc_buf;
8467 if (lj < j) {
8468 lj++;
8469 while (lj < j)
8470 gen_opc_instr_start[lj++] = 0;
8471 }
8472 gen_opc_pc[lj] = pc_ptr;
8473 gen_opc_cc_op[lj] = dc->cc_op;
8474 gen_opc_instr_start[lj] = 1;
8475 gen_opc_icount[lj] = num_insns;
8476 }
8477 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8478 gen_io_start();
8479
8480 pc_ptr = disas_insn(dc, pc_ptr);
8481 num_insns++;
8482 /* stop translation if indicated */
8483 if (dc->is_jmp)
8484 break;
8485#ifdef VBOX
8486#ifdef DEBUG
8487/*
8488 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8489 {
8490 //should never happen as the jump to the patch code terminates the translation block
8491 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8492 }
8493*/
8494#endif
8495 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8496 {
8497 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8498 gen_jmp_im(pc_ptr - dc->cs_base);
8499 gen_eob(dc);
8500 break;
8501 }
8502#endif /* VBOX */
8503
8504        /* in single-step mode, we generate only one instruction and
8505           then generate an exception */
8506        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8507           the flag and abort the translation to give the irqs a
8508           chance to happen */
8509 if (dc->tf || dc->singlestep_enabled ||
8510 (flags & HF_INHIBIT_IRQ_MASK)) {
8511 gen_jmp_im(pc_ptr - dc->cs_base);
8512 gen_eob(dc);
8513 break;
8514 }
8515 /* if too long translation, stop generation too */
8516 if (gen_opc_ptr >= gen_opc_end ||
8517 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8518 num_insns >= max_insns) {
8519 gen_jmp_im(pc_ptr - dc->cs_base);
8520 gen_eob(dc);
8521 break;
8522 }
8523 }
8524 if (tb->cflags & CF_LAST_IO)
8525 gen_io_end();
8526 gen_icount_end(tb, num_insns);
8527 *gen_opc_ptr = INDEX_op_end;
8528    /* zero any remaining instruction-start slots */
8529 if (search_pc) {
8530 j = gen_opc_ptr - gen_opc_buf;
8531 lj++;
8532 while (lj <= j)
8533 gen_opc_instr_start[lj++] = 0;
8534 }
8535
8536#ifdef DEBUG_DISAS
8537 if (loglevel & CPU_LOG_TB_CPU) {
8538 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8539 }
8540 if (loglevel & CPU_LOG_TB_IN_ASM) {
8541 int disas_flags;
8542 fprintf(logfile, "----------------\n");
8543 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8544#ifdef TARGET_X86_64
8545 if (dc->code64)
8546 disas_flags = 2;
8547 else
8548#endif
8549 disas_flags = !dc->code32;
8550 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8551 fprintf(logfile, "\n");
8552 }
8553#endif
8554
8555 if (!search_pc) {
8556 tb->size = pc_ptr - pc_start;
8557 tb->icount = num_insns;
8558 }
8559}
8560
8561void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8562{
8563 gen_intermediate_code_internal(env, tb, 0);
8564}
8565
8566void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8567{
8568 gen_intermediate_code_internal(env, tb, 1);
8569}
8570
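/* Restore env->eip (and cc_op, when it was statically known) for the
 * guest instruction at position pc_pos in the op stream, as recorded
 * during a search_pc translation pass, e.g. when restoring state after
 * a fault. */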
8571void gen_pc_load(CPUState *env, TranslationBlock *tb,
8572 unsigned long searched_pc, int pc_pos, void *puc)
8573{
8574 int cc_op;
8575#ifdef DEBUG_DISAS
8576 if (loglevel & CPU_LOG_TB_OP) {
8577 int i;
8578 fprintf(logfile, "RESTORE:\n");
8579 for(i = 0;i <= pc_pos; i++) {
8580 if (gen_opc_instr_start[i]) {
8581 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8582 }
8583 }
8584 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8585 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8586 (uint32_t)tb->cs_base);
8587 }
8588#endif
8589 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8590 cc_op = gen_opc_cc_op[pc_pos];
8591 if (cc_op != CC_OP_DYNAMIC)
8592 env->cc_op = cc_op;
8593}