VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@ 13539

Last change to this file since 13539 was 13539, committed by vboxsync 16 years ago

integrated Sander's changes to recompiler, support jumping between TBs,
now we crash somewhere on memory access

  • Property svn:eol-style set to native
File size: 272.1 KB
 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
45#define PREFIX_REPZ 0x01
46#define PREFIX_REPNZ 0x02
47#define PREFIX_LOCK 0x04
48#define PREFIX_DATA 0x08
49#define PREFIX_ADR 0x10
50
51#ifdef TARGET_X86_64
52#define X86_64_ONLY(x) x
53#ifndef VBOX
54#define X86_64_DEF(x...) x
55#else
56#define X86_64_DEF(x...) x
57#endif
58#define CODE64(s) ((s)->code64)
59#define REX_X(s) ((s)->rex_x)
60#define REX_B(s) ((s)->rex_b)
61/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
62#if 1
63#define BUGGY_64(x) NULL
64#endif
65#else
66#define X86_64_ONLY(x) NULL
67#ifndef VBOX
68#define X86_64_DEF(x...)
69#else
70#define X86_64_DEF(x)
71#endif
72#define CODE64(s) 0
73#define REX_X(s) 0
74#define REX_B(s) 0
75#endif
76
77//#define MACRO_TEST 1
78
79/* global register indexes */
80static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
81/* local temps */
82static TCGv cpu_T[2], cpu_T3;
83/* local register indexes (only used inside old micro ops) */
84static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
85static TCGv cpu_tmp5, cpu_tmp6;
86
87#include "gen-icount.h"
88
89#ifdef TARGET_X86_64
90static int x86_64_hregs;
91#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
97uint8_t ldub_code_raw(target_ulong pc)
98{
99 uint8_t b;
100
101 if (!remR3GetOpcode(cpu_single_env, pc, &b))
102 b = ldub_code(pc);
103 return b;
104}
105#define ldub_code(a) ldub_code_raw(a)
106
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
121
122
123typedef struct DisasContext {
124 /* current insn context */
125 int override; /* -1 if no override */
126 int prefix;
127 int aflag, dflag;
128 target_ulong pc; /* pc = eip + cs_base */
129 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
130 static state change (stop translation) */
131 /* current block context */
132 target_ulong cs_base; /* base of CS segment */
133 int pe; /* protected mode */
134 int code32; /* 32 bit code segment */
135#ifdef TARGET_X86_64
136 int lma; /* long mode active */
137 int code64; /* 64 bit code segment */
138 int rex_x, rex_b;
139#endif
140 int ss32; /* 32 bit stack segment */
141 int cc_op; /* current CC operation */
142 int addseg; /* non zero if either DS/ES/SS have a non zero base */
143 int f_st; /* currently unused */
144 int vm86; /* vm86 mode */
145#ifdef VBOX
146 int vme; /* CR4.VME */
147 int record_call; /* record calls for CSAM or not? */
148#endif
149 int cpl;
150 int iopl;
151 int tf; /* TF cpu flag */
152 int singlestep_enabled; /* "hardware" single step enabled */
153 int jmp_opt; /* use direct block chaining for direct jumps */
154 int mem_index; /* select memory access functions */
155 uint64_t flags; /* all execution flags */
156 struct TranslationBlock *tb;
157 int popl_esp_hack; /* for correct popl with esp base handling */
158 int rip_offset; /* only used in x86_64, but left for simplicity */
159 int cpuid_features;
160 int cpuid_ext_features;
161 int cpuid_ext2_features;
162 int cpuid_ext3_features;
163} DisasContext;
164
165static void gen_eob(DisasContext *s);
166static void gen_jmp(DisasContext *s, target_ulong eip);
167static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
168
169#ifdef VBOX
170static void gen_check_external_event();
171#endif
172
173/* i386 arith/logic operations */
174enum {
175 OP_ADDL,
176 OP_ORL,
177 OP_ADCL,
178 OP_SBBL,
179 OP_ANDL,
180 OP_SUBL,
181 OP_XORL,
182 OP_CMPL,
183};
184
185/* i386 shift ops */
186enum {
187 OP_ROL,
188 OP_ROR,
189 OP_RCL,
190 OP_RCR,
191 OP_SHL,
192 OP_SHR,
193 OP_SHL1, /* undocumented */
194 OP_SAR = 7,
195};
196
197enum {
198 JCC_O,
199 JCC_B,
200 JCC_Z,
201 JCC_BE,
202 JCC_S,
203 JCC_P,
204 JCC_L,
205 JCC_LE,
206};
207
208/* operand size */
209enum {
210 OT_BYTE = 0,
211 OT_WORD,
212 OT_LONG,
213 OT_QUAD,
214};
215
216enum {
217 /* I386 int registers */
218 OR_EAX, /* MUST be even numbered */
219 OR_ECX,
220 OR_EDX,
221 OR_EBX,
222 OR_ESP,
223 OR_EBP,
224 OR_ESI,
225 OR_EDI,
226
227 OR_TMP0 = 16, /* temporary operand register */
228 OR_TMP1,
229 OR_A0, /* temporary register used when doing address evaluation */
230};
231
232#ifndef VBOX
233static inline void gen_op_movl_T0_0(void)
234#else /* VBOX */
235DECLINLINE(void) gen_op_movl_T0_0(void)
236#endif /* VBOX */
237{
238 tcg_gen_movi_tl(cpu_T[0], 0);
239}
240
241#ifndef VBOX
242static inline void gen_op_movl_T0_im(int32_t val)
243#else /* VBOX */
244DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
245#endif /* VBOX */
246{
247 tcg_gen_movi_tl(cpu_T[0], val);
248}
249
250#ifndef VBOX
251static inline void gen_op_movl_T0_imu(uint32_t val)
252#else /* VBOX */
253DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
254#endif /* VBOX */
255{
256 tcg_gen_movi_tl(cpu_T[0], val);
257}
258
259#ifndef VBOX
260static inline void gen_op_movl_T1_im(int32_t val)
261#else /* VBOX */
262DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
263#endif /* VBOX */
264{
265 tcg_gen_movi_tl(cpu_T[1], val);
266}
267
268#ifndef VBOX
269static inline void gen_op_movl_T1_imu(uint32_t val)
270#else /* VBOX */
271DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
272#endif /* VBOX */
273{
274 tcg_gen_movi_tl(cpu_T[1], val);
275}
276
277#ifndef VBOX
278static inline void gen_op_movl_A0_im(uint32_t val)
279#else /* VBOX */
280DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
281#endif /* VBOX */
282{
283 tcg_gen_movi_tl(cpu_A0, val);
284}
285
286#ifdef TARGET_X86_64
287#ifndef VBOX
288static inline void gen_op_movq_A0_im(int64_t val)
289#else /* VBOX */
290DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
291#endif /* VBOX */
292{
293 tcg_gen_movi_tl(cpu_A0, val);
294}
295#endif
296
297#ifndef VBOX
298static inline void gen_movtl_T0_im(target_ulong val)
299#else /* VBOX */
300DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
301#endif /* VBOX */
302{
303 tcg_gen_movi_tl(cpu_T[0], val);
304}
305
306#ifndef VBOX
307static inline void gen_movtl_T1_im(target_ulong val)
308#else /* VBOX */
309DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
310#endif /* VBOX */
311{
312 tcg_gen_movi_tl(cpu_T[1], val);
313}
314
315#ifndef VBOX
316static inline void gen_op_andl_T0_ffff(void)
317#else /* VBOX */
318DECLINLINE(void) gen_op_andl_T0_ffff(void)
319#endif /* VBOX */
320{
321 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
322}
323
324#ifndef VBOX
325static inline void gen_op_andl_T0_im(uint32_t val)
326#else /* VBOX */
327DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
328#endif /* VBOX */
329{
330 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
331}
332
333#ifndef VBOX
334static inline void gen_op_movl_T0_T1(void)
335#else /* VBOX */
336DECLINLINE(void) gen_op_movl_T0_T1(void)
337#endif /* VBOX */
338{
339 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
340}
341
342#ifndef VBOX
343static inline void gen_op_andl_A0_ffff(void)
344#else /* VBOX */
345DECLINLINE(void) gen_op_andl_A0_ffff(void)
346#endif /* VBOX */
347{
348 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
349}
350
351#ifdef TARGET_X86_64
352
353#define NB_OP_SIZES 4
354
355#else /* !TARGET_X86_64 */
356
357#define NB_OP_SIZES 3
358
359#endif /* !TARGET_X86_64 */
360
361#if defined(WORDS_BIGENDIAN)
362#define REG_B_OFFSET (sizeof(target_ulong) - 1)
363#define REG_H_OFFSET (sizeof(target_ulong) - 2)
364#define REG_W_OFFSET (sizeof(target_ulong) - 2)
365#define REG_L_OFFSET (sizeof(target_ulong) - 4)
366#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
367#else
368#define REG_B_OFFSET 0
369#define REG_H_OFFSET 1
370#define REG_W_OFFSET 0
371#define REG_L_OFFSET 0
372#define REG_LH_OFFSET 4
373#endif
374
375#ifndef VBOX
376static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
377#else /* VBOX */
378DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
379#endif /* VBOX */
380{
381 switch(ot) {
382 case OT_BYTE:
383 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
384 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
385 } else {
386 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
387 }
388 break;
389 case OT_WORD:
390 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
391 break;
392#ifdef TARGET_X86_64
393 case OT_LONG:
394 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
395 /* high part of register set to zero */
396 tcg_gen_movi_tl(cpu_tmp0, 0);
397 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
398 break;
399 default:
400 case OT_QUAD:
401 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
402 break;
403#else
404 default:
405 case OT_LONG:
406 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
407 break;
408#endif
409 }
410}
411
412#ifndef VBOX
413static inline void gen_op_mov_reg_T0(int ot, int reg)
414#else /* VBOX */
415DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
416#endif /* VBOX */
417{
418 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
419}
420
421#ifndef VBOX
422static inline void gen_op_mov_reg_T1(int ot, int reg)
423#else /* VBOX */
424DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
425#endif /* VBOX */
426{
427 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
428}
429
430#ifndef VBOX
431static inline void gen_op_mov_reg_A0(int size, int reg)
432#else /* VBOX */
433DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
434#endif /* VBOX */
435{
436 switch(size) {
437 case 0:
438 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
439 break;
440#ifdef TARGET_X86_64
441 case 1:
442 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
443 /* high part of register set to zero */
444 tcg_gen_movi_tl(cpu_tmp0, 0);
445 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
446 break;
447 default:
448 case 2:
449 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
450 break;
451#else
452 default:
453 case 1:
454 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
455 break;
456#endif
457 }
458}
459
460#ifndef VBOX
461static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
462#else /* VBOX */
463DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
464#endif /* VBOX */
465{
466 switch(ot) {
467 case OT_BYTE:
468 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
469 goto std_case;
470 } else {
471 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
472 }
473 break;
474 default:
475 std_case:
476 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
477 break;
478 }
479}
480
481#ifndef VBOX
482static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
483#else /* VBOX */
484DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
485#endif /* VBOX */
486{
487 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
488}
489
490#ifndef VBOX
491static inline void gen_op_movl_A0_reg(int reg)
492#else /* VBOX */
493DECLINLINE(void) gen_op_movl_A0_reg(int reg)
494#endif /* VBOX */
495{
496 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
497}
498
499#ifndef VBOX
500static inline void gen_op_addl_A0_im(int32_t val)
501#else /* VBOX */
502DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
503#endif /* VBOX */
504{
505 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
506#ifdef TARGET_X86_64
507 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
508#endif
509}
510
511#ifdef TARGET_X86_64
512#ifndef VBOX
513static inline void gen_op_addq_A0_im(int64_t val)
514#else /* VBOX */
515DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
516#endif /* VBOX */
517{
518 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
519}
520#endif
521
522static void gen_add_A0_im(DisasContext *s, int val)
523{
524#ifdef TARGET_X86_64
525 if (CODE64(s))
526 gen_op_addq_A0_im(val);
527 else
528#endif
529 gen_op_addl_A0_im(val);
530}
531
532#ifndef VBOX
533static inline void gen_op_addl_T0_T1(void)
534#else /* VBOX */
535DECLINLINE(void) gen_op_addl_T0_T1(void)
536#endif /* VBOX */
537{
538 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
539}
540
541#ifndef VBOX
542static inline void gen_op_jmp_T0(void)
543#else /* VBOX */
544DECLINLINE(void) gen_op_jmp_T0(void)
545#endif /* VBOX */
546{
547 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
548}
549
550#ifndef VBOX
551static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
552#else /* VBOX */
553DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
554#endif /* VBOX */
555{
556 switch(size) {
557 case 0:
558 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
559 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
560 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
561 break;
562 case 1:
563 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
564 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
565#ifdef TARGET_X86_64
566 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
567#endif
568 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
569 break;
570#ifdef TARGET_X86_64
571 case 2:
572 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
573 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
574 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
575 break;
576#endif
577 }
578}
579
580#ifndef VBOX
581static inline void gen_op_add_reg_T0(int size, int reg)
582#else /* VBOX */
583DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
584#endif /* VBOX */
585{
586 switch(size) {
587 case 0:
588 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
589 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
590 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
591 break;
592 case 1:
593 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
594 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
595#ifdef TARGET_X86_64
596 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
597#endif
598 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
599 break;
600#ifdef TARGET_X86_64
601 case 2:
602 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
603 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
604 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
605 break;
606#endif
607 }
608}
609
610#ifndef VBOX
611static inline void gen_op_set_cc_op(int32_t val)
612#else /* VBOX */
613DECLINLINE(void) gen_op_set_cc_op(int32_t val)
614#endif /* VBOX */
615{
616 tcg_gen_movi_i32(cpu_cc_op, val);
617}
618
619#ifndef VBOX
620static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
621#else /* VBOX */
622DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
623#endif /* VBOX */
624{
625 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
626 if (shift != 0)
627 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
628 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
629#ifdef TARGET_X86_64
630 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
631#endif
632}
633
634#ifndef VBOX
635static inline void gen_op_movl_A0_seg(int reg)
636#else /* VBOX */
637DECLINLINE(void) gen_op_movl_A0_seg(int reg)
638#endif /* VBOX */
639{
640 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
641}
642
643#ifndef VBOX
644static inline void gen_op_addl_A0_seg(int reg)
645#else /* VBOX */
646DECLINLINE(void) gen_op_addl_A0_seg(int reg)
647#endif /* VBOX */
648{
649 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
650 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
651#ifdef TARGET_X86_64
652 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
653#endif
654}
655
656#ifdef TARGET_X86_64
657#ifndef VBOX
658static inline void gen_op_movq_A0_seg(int reg)
659#else /* VBOX */
660DECLINLINE(void) gen_op_movq_A0_seg(int reg)
661#endif /* VBOX */
662{
663 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
664}
665
666#ifndef VBOX
667static inline void gen_op_addq_A0_seg(int reg)
668#else /* VBOX */
669DECLINLINE(void) gen_op_addq_A0_seg(int reg)
670#endif /* VBOX */
671{
672 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
673 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
674}
675
676#ifndef VBOX
677static inline void gen_op_movq_A0_reg(int reg)
678#else /* VBOX */
679DECLINLINE(void) gen_op_movq_A0_reg(int reg)
680#endif /* VBOX */
681{
682 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
683}
684
685#ifndef VBOX
686static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
687#else /* VBOX */
688DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
689#endif /* VBOX */
690{
691 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
692 if (shift != 0)
693 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
694 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
695}
696#endif
697
698#ifndef VBOX
699static inline void gen_op_lds_T0_A0(int idx)
700#else /* VBOX */
701DECLINLINE(void) gen_op_lds_T0_A0(int idx)
702#endif /* VBOX */
703{
704 int mem_index = (idx >> 2) - 1;
705 switch(idx & 3) {
706 case 0:
707 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
708 break;
709 case 1:
710 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
711 break;
712 default:
713 case 2:
714 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
715 break;
716 }
717}
718
719#ifndef VBOX
720static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
721#else /* VBOX */
722DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
723#endif /* VBOX */
724{
725 int mem_index = (idx >> 2) - 1;
726 switch(idx & 3) {
727 case 0:
728 tcg_gen_qemu_ld8u(t0, a0, mem_index);
729 break;
730 case 1:
731 tcg_gen_qemu_ld16u(t0, a0, mem_index);
732 break;
733 case 2:
734 tcg_gen_qemu_ld32u(t0, a0, mem_index);
735 break;
736 default:
737 case 3:
738 tcg_gen_qemu_ld64(t0, a0, mem_index);
739 break;
740 }
741}
742
743/* XXX: always use ldu or lds */
744#ifndef VBOX
745static inline void gen_op_ld_T0_A0(int idx)
746#else /* VBOX */
747DECLINLINE(void) gen_op_ld_T0_A0(int idx)
748#endif /* VBOX */
749{
750 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
751}
752
753#ifndef VBOX
754static inline void gen_op_ldu_T0_A0(int idx)
755#else /* VBOX */
756DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
757#endif /* VBOX */
758{
759 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
760}
761
762#ifndef VBOX
763static inline void gen_op_ld_T1_A0(int idx)
764#else /* VBOX */
765DECLINLINE(void) gen_op_ld_T1_A0(int idx)
766#endif /* VBOX */
767{
768 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
769}
770
771#ifndef VBOX
772static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
773#else /* VBOX */
774DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
775#endif /* VBOX */
776{
777 int mem_index = (idx >> 2) - 1;
778 switch(idx & 3) {
779 case 0:
780 tcg_gen_qemu_st8(t0, a0, mem_index);
781 break;
782 case 1:
783 tcg_gen_qemu_st16(t0, a0, mem_index);
784 break;
785 case 2:
786 tcg_gen_qemu_st32(t0, a0, mem_index);
787 break;
788 default:
789 case 3:
790 tcg_gen_qemu_st64(t0, a0, mem_index);
791 break;
792 }
793}
794
795#ifndef VBOX
796static inline void gen_op_st_T0_A0(int idx)
797#else /* VBOX */
798DECLINLINE(void) gen_op_st_T0_A0(int idx)
799#endif /* VBOX */
800{
801 gen_op_st_v(idx, cpu_T[0], cpu_A0);
802}
803
804#ifndef VBOX
805static inline void gen_op_st_T1_A0(int idx)
806#else /* VBOX */
807DECLINLINE(void) gen_op_st_T1_A0(int idx)
808#endif /* VBOX */
809{
810 gen_op_st_v(idx, cpu_T[1], cpu_A0);
811}
812
813#ifndef VBOX
814static inline void gen_jmp_im(target_ulong pc)
815#else /* VBOX */
816DECLINLINE(void) gen_jmp_im(target_ulong pc)
817#endif /* VBOX */
818{
819#ifdef VBOX
820 gen_check_external_event();
821#endif /* VBOX */
822 tcg_gen_movi_tl(cpu_tmp0, pc);
823 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
824}
825
826#ifdef VBOX
827static void gen_check_external_event()
828{
829 /** @todo: this code is either wrong, or low performing,
830 rewrite flags check in TCG IR */
831 //tcg_gen_helper_0_0(helper_check_external_event);
832}
833
834#ifndef VBOX
835static inline void gen_update_eip(target_ulong pc)
836#else /* VBOX */
837DECLINLINE(void) gen_update_eip(target_ulong pc)
838#endif /* VBOX */
839{
840 gen_jmp_im(pc);
841
842}
843#endif
844
845#ifndef VBOX
846static inline void gen_string_movl_A0_ESI(DisasContext *s)
847#else /* VBOX */
848DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
849#endif /* VBOX */
850{
851 int override;
852
853 override = s->override;
854#ifdef TARGET_X86_64
855 if (s->aflag == 2) {
856 if (override >= 0) {
857 gen_op_movq_A0_seg(override);
858 gen_op_addq_A0_reg_sN(0, R_ESI);
859 } else {
860 gen_op_movq_A0_reg(R_ESI);
861 }
862 } else
863#endif
864 if (s->aflag) {
865 /* 32 bit address */
866 if (s->addseg && override < 0)
867 override = R_DS;
868 if (override >= 0) {
869 gen_op_movl_A0_seg(override);
870 gen_op_addl_A0_reg_sN(0, R_ESI);
871 } else {
872 gen_op_movl_A0_reg(R_ESI);
873 }
874 } else {
875 /* 16 address, always override */
876 if (override < 0)
877 override = R_DS;
878 gen_op_movl_A0_reg(R_ESI);
879 gen_op_andl_A0_ffff();
880 gen_op_addl_A0_seg(override);
881 }
882}
883
884#ifndef VBOX
885static inline void gen_string_movl_A0_EDI(DisasContext *s)
886#else /* VBOX */
887DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
888#endif /* VBOX */
889{
890#ifdef TARGET_X86_64
891 if (s->aflag == 2) {
892 gen_op_movq_A0_reg(R_EDI);
893 } else
894#endif
895 if (s->aflag) {
896 if (s->addseg) {
897 gen_op_movl_A0_seg(R_ES);
898 gen_op_addl_A0_reg_sN(0, R_EDI);
899 } else {
900 gen_op_movl_A0_reg(R_EDI);
901 }
902 } else {
903 gen_op_movl_A0_reg(R_EDI);
904 gen_op_andl_A0_ffff();
905 gen_op_addl_A0_seg(R_ES);
906 }
907}
908
909#ifndef VBOX
910static inline void gen_op_movl_T0_Dshift(int ot)
911#else /* VBOX */
912DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
913#endif /* VBOX */
914{
915 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
916 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
917};
918
919static void gen_extu(int ot, TCGv reg)
920{
921 switch(ot) {
922 case OT_BYTE:
923 tcg_gen_ext8u_tl(reg, reg);
924 break;
925 case OT_WORD:
926 tcg_gen_ext16u_tl(reg, reg);
927 break;
928 case OT_LONG:
929 tcg_gen_ext32u_tl(reg, reg);
930 break;
931 default:
932 break;
933 }
934}
935
936static void gen_exts(int ot, TCGv reg)
937{
938 switch(ot) {
939 case OT_BYTE:
940 tcg_gen_ext8s_tl(reg, reg);
941 break;
942 case OT_WORD:
943 tcg_gen_ext16s_tl(reg, reg);
944 break;
945 case OT_LONG:
946 tcg_gen_ext32s_tl(reg, reg);
947 break;
948 default:
949 break;
950 }
951}
952
953#ifndef VBOX
954static inline void gen_op_jnz_ecx(int size, int label1)
955#else /* VBOX */
956DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
957#endif /* VBOX */
958{
959 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
960 gen_extu(size + 1, cpu_tmp0);
961 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
962}
963
964#ifndef VBOX
965static inline void gen_op_jz_ecx(int size, int label1)
966#else /* VBOX */
967DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
968#endif /* VBOX */
969{
970 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
971 gen_extu(size + 1, cpu_tmp0);
972 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
973}
974
975static void *helper_in_func[3] = {
976 helper_inb,
977 helper_inw,
978 helper_inl,
979};
980
981static void *helper_out_func[3] = {
982 helper_outb,
983 helper_outw,
984 helper_outl,
985};
986
987static void *gen_check_io_func[3] = {
988 helper_check_iob,
989 helper_check_iow,
990 helper_check_iol,
991};
992
993static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
994 uint32_t svm_flags)
995{
996 int state_saved;
997 target_ulong next_eip;
998
999 state_saved = 0;
1000 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1001 if (s->cc_op != CC_OP_DYNAMIC)
1002 gen_op_set_cc_op(s->cc_op);
1003 gen_jmp_im(cur_eip);
1004 state_saved = 1;
1005 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1006 tcg_gen_helper_0_1(gen_check_io_func[ot],
1007 cpu_tmp2_i32);
1008 }
1009 if(s->flags & HF_SVMI_MASK) {
1010 if (!state_saved) {
1011 if (s->cc_op != CC_OP_DYNAMIC)
1012 gen_op_set_cc_op(s->cc_op);
1013 gen_jmp_im(cur_eip);
1014 state_saved = 1;
1015 }
1016 svm_flags |= (1 << (4 + ot));
1017 next_eip = s->pc - s->cs_base;
1018 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1019 tcg_gen_helper_0_3(helper_svm_check_io,
1020 cpu_tmp2_i32,
1021 tcg_const_i32(svm_flags),
1022 tcg_const_i32(next_eip - cur_eip));
1023 }
1024}
1025
1026#ifndef VBOX
1027static inline void gen_movs(DisasContext *s, int ot)
1028#else /* VBOX */
1029DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1030#endif /* VBOX */
1031{
1032 gen_string_movl_A0_ESI(s);
1033 gen_op_ld_T0_A0(ot + s->mem_index);
1034 gen_string_movl_A0_EDI(s);
1035 gen_op_st_T0_A0(ot + s->mem_index);
1036 gen_op_movl_T0_Dshift(ot);
1037 gen_op_add_reg_T0(s->aflag, R_ESI);
1038 gen_op_add_reg_T0(s->aflag, R_EDI);
1039}
1040
1041#ifndef VBOX
1042static inline void gen_update_cc_op(DisasContext *s)
1043#else /* VBOX */
1044DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1045#endif /* VBOX */
1046{
1047 if (s->cc_op != CC_OP_DYNAMIC) {
1048 gen_op_set_cc_op(s->cc_op);
1049 s->cc_op = CC_OP_DYNAMIC;
1050 }
1051}
1052
1053static void gen_op_update1_cc(void)
1054{
1055 tcg_gen_discard_tl(cpu_cc_src);
1056 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1057}
1058
1059static void gen_op_update2_cc(void)
1060{
1061 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1062 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1063}
1064
1065#ifndef VBOX
1066static inline void gen_op_cmpl_T0_T1_cc(void)
1067#else /* VBOX */
1068DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1069#endif /* VBOX */
1070{
1071 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1072 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1073}
1074
1075#ifndef VBOX
1076static inline void gen_op_testl_T0_T1_cc(void)
1077#else /* VBOX */
1078DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1079#endif /* VBOX */
1080{
1081 tcg_gen_discard_tl(cpu_cc_src);
1082 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1083}
1084
1085static void gen_op_update_neg_cc(void)
1086{
1087 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1088 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1089}
1090
1091/* compute eflags.C to reg */
1092static void gen_compute_eflags_c(TCGv reg)
1093{
1094#if TCG_TARGET_REG_BITS == 32
1095 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1096 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1097 (long)cc_table + offsetof(CCTable, compute_c));
1098 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1099 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1100 1, &cpu_tmp2_i32, 0, NULL);
1101#else
1102 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1103 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1104 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1105 (long)cc_table + offsetof(CCTable, compute_c));
1106 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1107 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1108 1, &cpu_tmp2_i32, 0, NULL);
1109#endif
1110 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1111}
1112
1113/* compute all eflags to cc_src */
1114static void gen_compute_eflags(TCGv reg)
1115{
1116#if TCG_TARGET_REG_BITS == 32
1117 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1118 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1119 (long)cc_table + offsetof(CCTable, compute_all));
1120 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1121 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1122 1, &cpu_tmp2_i32, 0, NULL);
1123#else
1124 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1125 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1126 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1127 (long)cc_table + offsetof(CCTable, compute_all));
1128 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1129 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1130 1, &cpu_tmp2_i32, 0, NULL);
1131#endif
1132 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1133}
1134
1135#ifndef VBOX
1136static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1137#else /* VBOX */
1138DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1139#endif /* VBOX */
1140{
1141 if (s->cc_op != CC_OP_DYNAMIC)
1142 gen_op_set_cc_op(s->cc_op);
1143 switch(jcc_op) {
1144 case JCC_O:
1145 gen_compute_eflags(cpu_T[0]);
1146 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1147 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1148 break;
1149 case JCC_B:
1150 gen_compute_eflags_c(cpu_T[0]);
1151 break;
1152 case JCC_Z:
1153 gen_compute_eflags(cpu_T[0]);
1154 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1155 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1156 break;
1157 case JCC_BE:
1158 gen_compute_eflags(cpu_tmp0);
1159 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1160 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1161 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1162 break;
1163 case JCC_S:
1164 gen_compute_eflags(cpu_T[0]);
1165 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1166 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1167 break;
1168 case JCC_P:
1169 gen_compute_eflags(cpu_T[0]);
1170 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1171 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1172 break;
1173 case JCC_L:
1174 gen_compute_eflags(cpu_tmp0);
1175 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1176 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1177 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1178 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1179 break;
1180 default:
1181 case JCC_LE:
1182 gen_compute_eflags(cpu_tmp0);
1183 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1184 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1185 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1186 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1187 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1188 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1189 break;
1190 }
1191}
1192
1193/* return true if setcc_slow is not needed (WARNING: must be kept in
1194 sync with gen_jcc1) */
1195static int is_fast_jcc_case(DisasContext *s, int b)
1196{
1197 int jcc_op;
1198 jcc_op = (b >> 1) & 7;
1199 switch(s->cc_op) {
1200 /* we optimize the cmp/jcc case */
1201 case CC_OP_SUBB:
1202 case CC_OP_SUBW:
1203 case CC_OP_SUBL:
1204 case CC_OP_SUBQ:
1205 if (jcc_op == JCC_O || jcc_op == JCC_P)
1206 goto slow_jcc;
1207 break;
1208
1209 /* some jumps are easy to compute */
1210 case CC_OP_ADDB:
1211 case CC_OP_ADDW:
1212 case CC_OP_ADDL:
1213 case CC_OP_ADDQ:
1214
1215 case CC_OP_LOGICB:
1216 case CC_OP_LOGICW:
1217 case CC_OP_LOGICL:
1218 case CC_OP_LOGICQ:
1219
1220 case CC_OP_INCB:
1221 case CC_OP_INCW:
1222 case CC_OP_INCL:
1223 case CC_OP_INCQ:
1224
1225 case CC_OP_DECB:
1226 case CC_OP_DECW:
1227 case CC_OP_DECL:
1228 case CC_OP_DECQ:
1229
1230 case CC_OP_SHLB:
1231 case CC_OP_SHLW:
1232 case CC_OP_SHLL:
1233 case CC_OP_SHLQ:
1234 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1235 goto slow_jcc;
1236 break;
1237 default:
1238 slow_jcc:
1239 return 0;
1240 }
1241 return 1;
1242}
1243
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used.
   'b' encodes the condition in bits 1-3 (jcc_op) and the inversion
   (e.g. JNZ vs JZ) in bit 0. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;                /* bit 0 inverts the branch condition */
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case: cc_dst holds the subtraction
           result and cc_src the second operand, so most conditions can
           be tested directly without materializing EFLAGS */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;  /* 0=byte, 1=word, 2=long, 3=quad */
        switch(jcc_op) {
        case JCC_Z:
            /* ZF: test the (size-masked) result against zero */
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
            /* SF: test the sign bit of the result for this size */
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                /* full-width: a signed compare against 0 tests the sign */
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            /* recover the first operand as cc_dst + cc_src and do an
               unsigned compare with the second operand (cc_src) */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            /* same reconstruction as above, but with a signed compare
               after sign-extending both operands to the operand size */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            /* JCC_O / JCC_P need full flag computation */
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute: only ZF and SF are handled
           fast here, see is_fast_jcc_case */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;    /* size from the B/W/L/Q group offset */
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* fall back: compute the condition into T0 and branch on it */
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
1445
1446/* XXX: does not work with gdbstub "ice" single step - not a
1447 serious problem */
1448static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1449{
1450 int l1, l2;
1451
1452 l1 = gen_new_label();
1453 l2 = gen_new_label();
1454 gen_op_jnz_ecx(s->aflag, l1);
1455 gen_set_label(l2);
1456 gen_jmp_tb(s, next_eip, 1);
1457 gen_set_label(l1);
1458 return l2;
1459}
1460
/* STOS: store the accumulator at the [EDI] string destination and
   advance EDI by the Dshift step for this operand size */
#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);   /* T0 = accumulator */
    gen_string_movl_A0_EDI(s);              /* A0 = string dest address */
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);              /* T0 = per-element step */
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1473
/* LODS: load an element from the [ESI] string source into the
   accumulator and advance ESI by the Dshift step */
#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);              /* A0 = string source address */
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);           /* accumulator = loaded value */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1486
/* SCAS: compare the accumulator with the element at [EDI], setting the
   arithmetic flags via the cmp path, then advance EDI */
#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);   /* T0 = accumulator */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);     /* T1 = memory element */
    gen_op_cmpl_T0_T1_cc();                 /* update cc state as for cmp */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1500
/* CMPS: compare [ESI] element with [EDI] element, setting the
   arithmetic flags, then advance both index registers */
#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);     /* T0 = source element */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);     /* T1 = destination element */
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1516
/* INS: read a value from the I/O port in DX and store it at [EDI],
   then advance EDI.  Bracketed with gen_io_start/gen_io_end when
   instruction counting is active */
#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);   /* port is 16-bit */
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);     /* real store of the port data */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}
1540
/* OUTS: load a value from [ESI] and write it to the I/O port in DX,
   then advance ESI.  Bracketed with gen_io_start/gen_io_end when
   instruction counting is active */
#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);     /* T0 = value to output */

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);   /* port is 16-bit */
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}
1563
/* same method as Valgrind : we generate jumps to current or next
   instruction.
   GEN_REPZ(op) defines gen_repz_<op>: emit one iteration of the string
   op with the ECX-exhausted exit test before it, decrement ECX, and
   loop by jumping back to cur_eip (re-translating the rep insn) */
#ifndef VBOX
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ(op)                                                          \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */
1599
/* GEN_REPZ2(op) is GEN_REPZ for the flag-testing string ops (scas,
   cmps): after one iteration it additionally exits the loop when ZF
   mismatches the repz/repnz prefix ('nz' selects which) */
#ifndef VBOX
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ2(op)                                                         \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip,                        \
                                 target_ulong next_eip,                       \
                                 int nz)                                      \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */
1637
/* instantiate the rep wrappers; scas/cmps also need the ZF exit test */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1645
/* x87 two-operand helpers, ST0 <- ST0 op FT0.  Indexed by the 3-bit
   FP operation number (fadd, fmul, fcom, fcomp, fsub, fsubr, fdiv,
   fdivr); fcom and fcomp share one helper, the pop being handled by
   the caller — NOTE(review): index source assumed from the op names,
   confirm against the FP decode in disas_insn */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1656
/* x87 helpers for ST(n) <- ST(n) op ST0.  NOTE the exception in "r" op
   ordering: the reversed sub/div variants swap places relative to the
   ST0_FT0 table above.  The compare slots are NULL since fcom has no
   STN destination form */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1668
/* Emit one two-operand ALU op (T0 = T0 op T1) and update the lazy
   condition-code state.  if d == OR_TMP0, it means memory operand
   (address in A0); otherwise d is the destination register. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* ADC needs the carry of the previous op, so flush cc state
           and materialize C into cpu_tmp4 first */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* cc_op = CC_OP_ADDB+ot or CC_OP_ADCB+ot depending on the
           carry-in: the carry value (0/1) shifted by 2 selects the
           group, so cc_op must stay dynamic at translation time */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* SBB: same scheme as ADC, with subtraction */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();            /* cc_src = T1, cc_dst = result */
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();            /* logic ops only record the result */
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* CMP: flags only, no writeback */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1764
/* Emit INC (c > 0) or DEC by one.  if d == OR_TMP0, it means memory
   operand (address in A0).  CF is not modified by inc/dec, so the
   previous op's carry is recomputed into cc_src after flushing the
   pending cc state. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* preserve CF from the previous operation */
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1788
/* Emit SHL/SHR/SAR with a runtime count in T1.  op1 == OR_TMP0 means
   memory operand (address in A0).  The value shifted by (count-1) is
   kept in cpu_T3 so its edge bit can become CF; because a zero count
   must leave the flags untouched, the cc update is guarded by a
   runtime branch on the masked count. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;            /* 64-bit shifts mask the count to 6 bits */
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);    /* count - 1 for the CF shift */

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);   /* locals survive the branch below */
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    /* non-zero count: record result and count-1 value for lazy flags */
    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1859
/* Emit SHL/SHR/SAR with an immediate count (op2).  op1 == OR_TMP0
   means memory operand (address in A0).  Unlike the T1 variant the
   count is known at translation time, so the flag update needs no
   runtime branch and cc_op can be set statically. */
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                /* shift by count-1 into tmp4 so its low bit becomes CF */
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
1910
1911#ifndef VBOX
1912static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1913#else /* VBOX */
1914DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1915#endif /* VBOX */
1916{
1917 if (arg2 >= 0)
1918 tcg_gen_shli_tl(ret, arg1, arg2);
1919 else
1920 tcg_gen_shri_tl(ret, arg1, -arg2);
1921}
1922
/* Emit ROL/ROR with a runtime count in T1.  The rotate is built from
   two opposite shifts OR'd together.  Both the rotate itself and the
   C/O flag update are skipped at runtime when the masked count is
   zero, matching the architectural "count 0 leaves flags" behaviour.
   XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);     /* keep the address in a local temp */
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    /* rotate count modulo data size for sub-word operands */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);             /* t2 = pre-rotate value, for OF */

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);    /* combine the two halves */

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    /* non-zero count: recompute C and O, keep the rest of EFLAGS */
    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);   /* OF from old^new top bit */
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1); /* CF = msb for ROR */
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2019
/* rotate-through-carry helpers, indexed by ot + (is_right * 4):
   slots 0-3 are RCL for byte/word/long/quad, slots 4-7 are RCR.
   The quad variants exist only on 64-bit targets */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
2030
/* Emit RCL/RCR (rotate through carry) with count in T1 via the helper
   table above.  op1 == OR_TMP0 means memory operand (address in A0).
   Flags are updated at runtime only when cpu_cc_tmp != -1 after the
   helper — presumably the helper leaves -1 there for a zero count;
   TODO confirm against the rcl/rcr helpers.
   XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);     /* helper reads the live flags */

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
2065
/* Emit SHLD/SHRD (double-precision shift): shift t0 by the count in
   T3, filling the vacated bits from T1.  op1 == OR_TMP0 means memory
   operand (address in A0).  The 16-bit case follows the Intel
   behaviour for counts > 16; flags are updated only for a non-zero
   count, via a runtime branch.
   XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    /* local temps: they must survive the branches below */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);       /* t1 = fill operand */
    tcg_gen_mov_tl(t2, cpu_T3);         /* t2 = masked count */

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);  /* count-1 shift feeds CF */
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build a 32-bit value {t1:t0} and shift it right */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* result = (t0 >> count) | (t1 << (data_bits - count)) */
            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);       /* t1 = count-1 shifted value for flags */

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2194
2195static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2196{
2197 if (s != OR_TMP1)
2198 gen_op_mov_TN_reg(ot, 1, s);
2199 switch(op) {
2200 case OP_ROL:
2201 gen_rot_rm_T1(s1, ot, d, 0);
2202 break;
2203 case OP_ROR:
2204 gen_rot_rm_T1(s1, ot, d, 1);
2205 break;
2206 case OP_SHL:
2207 case OP_SHL1:
2208 gen_shift_rm_T1(s1, ot, d, 0, 0);
2209 break;
2210 case OP_SHR:
2211 gen_shift_rm_T1(s1, ot, d, 1, 0);
2212 break;
2213 case OP_SAR:
2214 gen_shift_rm_T1(s1, ot, d, 1, 1);
2215 break;
2216 case OP_RCL:
2217 gen_rotc_rm_T1(s1, ot, d, 0);
2218 break;
2219 case OP_RCR:
2220 gen_rotc_rm_T1(s1, ot, d, 1);
2221 break;
2222 }
2223}
2224
2225static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2226{
2227 switch(op) {
2228 case OP_SHL:
2229 case OP_SHL1:
2230 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2231 break;
2232 case OP_SHR:
2233 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2234 break;
2235 case OP_SAR:
2236 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2237 break;
2238 default:
2239 /* currently not optimized */
2240 gen_op_movl_T1_im(c);
2241 gen_shift(s1, op, ot, d, OR_TMP1);
2242 break;
2243 }
2244}
2245
/* Decode a modrm (and optional SIB/displacement) memory operand and
   emit the effective-address computation into A0, including the
   segment base when required.  Advances s->pc past the consumed
   bytes.  *reg_ptr/*offset_ptr are filled with OR_A0/0 for the
   caller. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;       /* an override prefix always adds the base */
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* rm == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base: disp32, RIP-relative in 64-bit mode w/o SIB */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* no base register: A0 = disp only */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing: fixed base+index combinations */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* [disp16] only */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();  /* 16-bit wraparound */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
2429
2430static void gen_nop_modrm(DisasContext *s, int modrm)
2431{
2432 int mod, rm, base, code;
2433
2434 mod = (modrm >> 6) & 3;
2435 if (mod == 3)
2436 return;
2437 rm = modrm & 7;
2438
2439 if (s->aflag) {
2440
2441 base = rm;
2442
2443 if (base == 4) {
2444 code = ldub_code(s->pc++);
2445 base = (code & 7);
2446 }
2447
2448 switch (mod) {
2449 case 0:
2450 if (base == 5) {
2451 s->pc += 4;
2452 }
2453 break;
2454 case 1:
2455 s->pc++;
2456 break;
2457 default:
2458 case 2:
2459 s->pc += 4;
2460 break;
2461 }
2462 } else {
2463 switch (mod) {
2464 case 0:
2465 if (rm == 6) {
2466 s->pc += 2;
2467 }
2468 break;
2469 case 1:
2470 s->pc++;
2471 break;
2472 default:
2473 case 2:
2474 s->pc += 2;
2475 break;
2476 }
2477 }
2478}
2479
2480/* used for LEA and MOV AX, mem */
2481static void gen_add_A0_ds_seg(DisasContext *s)
2482{
2483 int override, must_add_seg;
2484 must_add_seg = s->addseg;
2485 override = R_DS;
2486 if (s->override >= 0) {
2487 override = s->override;
2488 must_add_seg = 1;
2489 } else {
2490 override = R_DS;
2491 }
2492 if (must_add_seg) {
2493#ifdef TARGET_X86_64
2494 if (CODE64(s)) {
2495 gen_op_addq_A0_seg(override);
2496 } else
2497#endif
2498 {
2499 gen_op_addl_A0_seg(override);
2500 }
2501 }
2502}
2503
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
/* For mod == 3 the operand is a register and the move is emitted as a
   register<->register transfer through T0; otherwise the effective
   address is computed by gen_lea_modrm() and the access goes via A0. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            /* store: T0 <- reg (unless value already in T0), then rm <- T0 */
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            /* load: T0 <- rm, then reg <- T0 (unless caller wants T0) */
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
2535
2536#ifndef VBOX
2537static inline uint32_t insn_get(DisasContext *s, int ot)
2538#else /* VBOX */
2539DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2540#endif /* VBOX */
2541{
2542 uint32_t ret;
2543
2544 switch(ot) {
2545 case OT_BYTE:
2546 ret = ldub_code(s->pc);
2547 s->pc++;
2548 break;
2549 case OT_WORD:
2550 ret = lduw_code(s->pc);
2551 s->pc += 2;
2552 break;
2553 default:
2554 case OT_LONG:
2555 ret = ldl_code(s->pc);
2556 s->pc += 4;
2557 break;
2558 }
2559 return ret;
2560}
2561
2562#ifndef VBOX
2563static inline int insn_const_size(unsigned int ot)
2564#else /* VBOX */
2565DECLINLINE(int) insn_const_size(unsigned int ot)
2566#endif /* VBOX */
2567{
2568 if (ot <= OT_LONG)
2569 return 1 << ot;
2570 else
2571 return 4;
2572}
2573
#ifndef VBOX
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#else /* VBOX */
DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#endif /* VBOX */
{
    /* Emit a jump to 'eip'.  If the target lies on one of the (at most
       two) guest pages the current TB already covers, emit a patchable
       direct jump (goto_tb/exit_tb) so TBs can be chained; otherwise
       fall back to a plain end-of-block. */
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        /* the low bits of the returned value tell the chaining code
           which of this TB's two jump slots to patch */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2598
#ifndef VBOX
static inline void gen_jcc(DisasContext *s, int b,
#else /* VBOX */
DECLINLINE(void) gen_jcc(DisasContext *s, int b,
#endif /* VBOX */
                          target_ulong val, target_ulong next_eip)
{
    /* Emit a conditional jump on condition code 'b':
       taken -> 'val', not taken -> 'next_eip'.  With s->jmp_opt both
       outcomes get a chainable goto_tb; otherwise a simple branch
       followed by an end-of-block is generated. */
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        /* flush the lazy flags state; it is live across the branch */
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        /* fall-through path: TB slot 0 */
        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        /* taken path: TB slot 1 */
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {

        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2640
/* Emit SETcc: T0 <- 0/1 according to condition code 'b'. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1); /* branch on the inverted condition */
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worthwhile */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            /* odd condition codes are the negation of the even ones */
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
2669
#ifndef VBOX
static inline void gen_op_movl_T0_seg(int seg_reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
#endif /* VBOX */
{
    /* T0 <- selector of segment register 'seg_reg'. */
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}
2679
2680#ifndef VBOX
2681static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2682#else /* VBOX */
2683DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2684#endif /* VBOX */
2685{
2686 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2687 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2688 offsetof(CPUX86State,segs[seg_reg].selector));
2689 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2690 tcg_gen_st_tl(cpu_T[0], cpu_env,
2691 offsetof(CPUX86State,segs[seg_reg].base));
2692#ifdef VBOX
2693 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2694 if (seg_reg == R_CS)
2695 flags |= DESC_CS_MASK;
2696 gen_op_movl_T0_im(flags);
2697 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2698#endif
2699}
2700
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the helper performs the descriptor load and
           may raise an exception, so the flags state and EIP must be
           committed first */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/VM86 mode: plain base = selector << 4 load */
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2724
2725#ifndef VBOX
2726static inline int svm_is_rep(int prefixes)
2727#else /* VBOX */
2728DECLINLINE(int) svm_is_rep(int prefixes)
2729#endif /* VBOX */
2730{
2731 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2732}
2733
#ifndef VBOX
static inline void
#else /* VBOX */
DECLINLINE(void)
#endif /* VBOX */
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* Emit an SVM intercept check of 'type' with exit info 'param'.
       The helper may trigger a #VMEXIT, so flags and EIP are committed
       before the call. */
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                       tcg_const_i32(type), tcg_const_i64(param));
}
2751
#ifndef VBOX
static inline void
#else /* VBOX */
DECLINLINE(void)
#endif
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    /* Convenience wrapper: SVM intercept check without exit info. */
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2761
#ifndef VBOX
static inline void gen_stack_update(DisasContext *s, int addend)
#else /* VBOX */
DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
#endif /* VBOX */
{
    /* Add 'addend' to the stack pointer, using the register width
       implied by the current stack mode (64-bit, 32-bit or 16-bit). */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}
2779
/* generate a push. It depends on ss32, addseg and dflag */
/* Pushes T0 onto the stack: decrement a copy of ESP in A0, store T0
   through A0 (with SS base added where needed), then commit the new
   stack pointer.  The two-step sequence keeps ESP unchanged if the
   store faults. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit operand size (default in long mode) */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16-bit operand size via 0x66 prefix */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the commit */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        /* s->dflag + 1 maps 0/1 to OT_WORD/OT_LONG */
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2819
2820/* generate a push. It depends on ss32, addseg and dflag */
2821/* slower version for T1, only used for call Ev */
2822static void gen_push_T1(DisasContext *s)
2823{
2824#ifdef TARGET_X86_64
2825 if (CODE64(s)) {
2826 gen_op_movq_A0_reg(R_ESP);
2827 if (s->dflag) {
2828 gen_op_addq_A0_im(-8);
2829 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2830 } else {
2831 gen_op_addq_A0_im(-2);
2832 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2833 }
2834 gen_op_mov_reg_A0(2, R_ESP);
2835 } else
2836#endif
2837 {
2838 gen_op_movl_A0_reg(R_ESP);
2839 if (!s->dflag)
2840 gen_op_addl_A0_im(-2);
2841 else
2842 gen_op_addl_A0_im(-4);
2843 if (s->ss32) {
2844 if (s->addseg) {
2845 gen_op_addl_A0_seg(R_SS);
2846 }
2847 } else {
2848 gen_op_andl_A0_ffff();
2849 gen_op_addl_A0_seg(R_SS);
2850 }
2851 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2852
2853 if (s->ss32 && !s->addseg)
2854 gen_op_mov_reg_A0(1, R_ESP);
2855 else
2856 gen_stack_update(s, (-2) << s->dflag);
2857 }
2858}
2859
/* two step pop is necessary for precise exceptions */
/* Load the value at the top of the stack into T0 WITHOUT updating the
   stack pointer; the caller commits the pop with gen_pop_update() once
   no fault can occur anymore. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        /* s->dflag + 1 maps 0/1 to OT_WORD/OT_LONG */
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2881
/* Commit a pop started by gen_pop_T0(): advance the stack pointer by
   the operand size. */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2893
/* A0 <- linear address of the stack top; T1 keeps the raw (unsegmented)
   ESP value for later stack-pointer updates. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2903
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit PUSHA/PUSHAD: store EAX..EDI (in 7-i order) below the current
   stack top, then commit the pre-decremented ESP kept in T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);   /* room for 8 regs of 2<<dflag bytes */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);     /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2922
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit POPA/POPAD: reload EDI..EAX from the stack (skipping the saved
   ESP slot) and commit the post-incremented ESP kept in T1. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);  /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2944
/* Emit ENTER: push EBP, optionally copy 'level' nesting frame pointers
   (done by a helper), set EBP to the new frame and allocate
   'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;   /* hardware masks the nesting level to 5 bits */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);   /* T1 = new frame pointer */

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);   /* T1 = new frame pointer */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
3000
/* Raise exception 'trapno' at guest EIP 'cur_eip'; flags and EIP are
   committed first so the exception sees a precise CPU state. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;   /* end the TB: control never returns here */
}
3009
/* an interrupt is different from an exception because of the
   privilege checks */
/* Emit a software interrupt 'intno' (INT n); the helper needs the
   instruction length to compute the return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;   /* end the TB */
}
3023
/* Emit a debug trap at 'cur_eip' (used for breakpoints). */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;   /* end the TB */
}
3032
/* generate a generic end of block. Trace exception is also generated
   if needed */
/* Terminates the TB: flushes lazy flags, clears the IRQ-inhibit state
   set by MOV SS/STI, and exits via debug trap, single-step trap or a
   plain (non-chainable) exit_tb(0). */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        /* exit_tb(0): no TB chaining from a generic end of block */
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
3051
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
/* Unconditional jump to 'eip'; uses the chainable goto_tb path when
   s->jmp_opt allows it, otherwise a plain end-of-block. */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
3071
/* Unconditional jump to 'eip' using TB jump slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
3076
3077#ifndef VBOX
3078static inline void gen_ldq_env_A0(int idx, int offset)
3079#else /* VBOX */
3080DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3081#endif /* VBOX */
3082{
3083 int mem_index = (idx >> 2) - 1;
3084 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3085 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3086}
3087
3088#ifndef VBOX
3089static inline void gen_stq_env_A0(int idx, int offset)
3090#else /* VBOX */
3091DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3092#endif /* VBOX */
3093{
3094 int mem_index = (idx >> 2) - 1;
3095 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3096 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3097}
3098
3099#ifndef VBOX
3100static inline void gen_ldo_env_A0(int idx, int offset)
3101#else /* VBOX */
3102DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3103#endif /* VBOX */
3104{
3105 int mem_index = (idx >> 2) - 1;
3106 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3107 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3108 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3109 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3110 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3111}
3112
3113#ifndef VBOX
3114static inline void gen_sto_env_A0(int idx, int offset)
3115#else /* VBOX */
3116DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3117#endif /* VBOX */
3118{
3119 int mem_index = (idx >> 2) - 1;
3120 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3121 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3122 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3123 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3124 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3125}
3126
#ifndef VBOX
static inline void gen_op_movo(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 128-bit value between two env offsets (two 64-bit moves). */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
3138
#ifndef VBOX
static inline void gen_op_movq(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 64-bit value between two env offsets. */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3148
#ifndef VBOX
static inline void gen_op_movl(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 32-bit value between two env offsets. */
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
3158
#ifndef VBOX
static inline void gen_op_movq_env_0(int d_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_env_0(int d_offset)
#endif /* VBOX */
{
    /* Zero a 64-bit env field (e.g. the high half of an XMM register). */
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3168
/* Markers for the SSE dispatch tables below: SSE_SPECIAL entries are
   decoded by hand in gen_sse(), SSE_DUMMY marks femms/emms. */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* Build a {mmx, xmm} helper pair / a {ps, pd, ss, sd} helper quad. */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
3175
/* Dispatch table for the two-byte 0x0f <op> MMX/SSE opcode space.
   Each row is indexed by the mandatory prefix: [0] none, [1] 0x66,
   [2] 0xf3, [3] 0xf2.  NULL entries are illegal opcodes. */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* placeholder: cmpps et al., predicate decoded via sse_op_table4 */
    [0xc6] = { helper_shufps, helper_shufpd },

    [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
3304
/* Shift-by-immediate group (opcodes 0x71/0x72/0x73): rows of 8 for the
   w/d/q shift widths, indexed within each row by the modrm /reg field;
   column [0] is the MMX helper, [1] the XMM helper. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm }, /* byte shifts: SSE only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
3317
/* Scalar int<->float converters, rows of 4: cvtsi2ss/sd, cvttss/sd2si,
   cvtss/sd2si; within a row, the second pair are the 64-bit integer
   variants (only available on x86-64 builds). */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
3334
/* cmpps/cmppd/cmpss/cmpsd (opcode 0xc2): indexed by the 3-bit compare
   predicate immediate, then by prefix as in sse_op_table1. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
3345
/* 3DNow! operations, indexed by the 8-bit opcode suffix byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
3372
/* Entry for the three-byte (0x0f 0x38 / 0x0f 0x3a) opcode tables: a
   {mmx, xmm} helper pair plus the CPUID_EXT_* feature bit that must be
   present for the instruction to be legal. */
struct sse_op_helper_s {
    void *op[2]; uint32_t ext_mask;
};
#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
/* 0x0f 0x38 three-byte opcode space (SSSE3/SSE4.1/SSE4.2), indexed by
   the third opcode byte. */
static struct sse_op_helper_s sse_op_table6[256] = {
    [0x00] = SSSE3_OP(pshufb),
    [0x01] = SSSE3_OP(phaddw),
    [0x02] = SSSE3_OP(phaddd),
    [0x03] = SSSE3_OP(phaddsw),
    [0x04] = SSSE3_OP(pmaddubsw),
    [0x05] = SSSE3_OP(phsubw),
    [0x06] = SSSE3_OP(phsubd),
    [0x07] = SSSE3_OP(phsubsw),
    [0x08] = SSSE3_OP(psignb),
    [0x09] = SSSE3_OP(psignw),
    [0x0a] = SSSE3_OP(psignd),
    [0x0b] = SSSE3_OP(pmulhrsw),
    [0x10] = SSE41_OP(pblendvb),
    [0x14] = SSE41_OP(blendvps),
    [0x15] = SSE41_OP(blendvpd),
    [0x17] = SSE41_OP(ptest),
    [0x1c] = SSSE3_OP(pabsb),
    [0x1d] = SSSE3_OP(pabsw),
    [0x1e] = SSSE3_OP(pabsd),
    [0x20] = SSE41_OP(pmovsxbw),
    [0x21] = SSE41_OP(pmovsxbd),
    [0x22] = SSE41_OP(pmovsxbq),
    [0x23] = SSE41_OP(pmovsxwd),
    [0x24] = SSE41_OP(pmovsxwq),
    [0x25] = SSE41_OP(pmovsxdq),
    [0x28] = SSE41_OP(pmuldq),
    [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntqda */
    [0x2b] = SSE41_OP(packusdw),
    [0x30] = SSE41_OP(pmovzxbw),
    [0x31] = SSE41_OP(pmovzxbd),
    [0x32] = SSE41_OP(pmovzxbq),
    [0x33] = SSE41_OP(pmovzxwd),
    [0x34] = SSE41_OP(pmovzxwq),
    [0x35] = SSE41_OP(pmovzxdq),
    [0x37] = SSE42_OP(pcmpgtq),
    [0x38] = SSE41_OP(pminsb),
    [0x39] = SSE41_OP(pminsd),
    [0x3a] = SSE41_OP(pminuw),
    [0x3b] = SSE41_OP(pminud),
    [0x3c] = SSE41_OP(pmaxsb),
    [0x3d] = SSE41_OP(pmaxsd),
    [0x3e] = SSE41_OP(pmaxuw),
    [0x3f] = SSE41_OP(pmaxud),
    [0x40] = SSE41_OP(pmulld),
    [0x41] = SSE41_OP(phminposuw),
};
3428
/* 0x0f 0x3a three-byte opcode space (instructions taking an immediate
   byte), indexed by the third opcode byte. */
static struct sse_op_helper_s sse_op_table7[256] = {
    [0x08] = SSE41_OP(roundps),
    [0x09] = SSE41_OP(roundpd),
    [0x0a] = SSE41_OP(roundss),
    [0x0b] = SSE41_OP(roundsd),
    [0x0c] = SSE41_OP(blendps),
    [0x0d] = SSE41_OP(blendpd),
    [0x0e] = SSE41_OP(pblendw),
    [0x0f] = SSSE3_OP(palignr),
    [0x14] = SSE41_SPECIAL, /* pextrb */
    [0x15] = SSE41_SPECIAL, /* pextrw */
    [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
    [0x17] = SSE41_SPECIAL, /* extractps */
    [0x20] = SSE41_SPECIAL, /* pinsrb */
    [0x21] = SSE41_SPECIAL, /* insertps */
    [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
    [0x40] = SSE41_OP(dpps),
    [0x41] = SSE41_OP(dppd),
    [0x42] = SSE41_OP(mpsadbw),
    [0x60] = SSE42_OP(pcmpestrm),
    [0x61] = SSE42_OP(pcmpestri),
    [0x62] = SSE42_OP(pcmpistrm),
    [0x63] = SSE42_OP(pcmpistri),
};
3453
/* Translate one MMX/SSE/SSE2/SSE3/SSSE3/SSE4/3DNow! instruction.
 *
 * 'b' is the opcode byte following 0x0f, 'pc_start' the guest address of
 * the instruction and 'rex_r' the REX.R extension for the modrm reg field.
 * Decoding is table driven: sse_op_table1[b][b1] yields either a helper
 * function to call or SSE_SPECIAL for instructions that need hand-written
 * handling.  'b1' encodes the mandatory prefix that selects between the
 * instruction variants: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2.  Inside
 * the SSE_SPECIAL switch the prefix index is folded into the opcode as
 * b |= (b1 << 8), which is why case labels there carry a 0x1xx/0x2xx/0x3xx
 * prefix digit. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    /* map the mandatory prefix to the table1 column index */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* 0x10..0x5f, 0xc2 and 0xc6 are always SSE; elsewhere only a
       mandatory prefix selects the XMM form, otherwise it is MMX. */
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE requires CR4.OSFXSR, except the 0x38/0x3a maps without 0x66 */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* hand-decoded instructions; fold the prefix index into b */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd positions */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even positions */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* store the shift count into xmm_t0/mmx_t0 and dispatch
               through sse_op_table2 on opcode and modrm reg field */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            } else {
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x138:
            if (s->prefix & PREFIX_REPNZ)
                goto crc32;
            /* fall through */
        case 0x038:
            /* three-byte 0x0f 0x38 map: the current modrm byte is
               really the third opcode byte */
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table6[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    switch (b) {
                    case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
                    case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
                    case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
                        gen_ldq_env_A0(s->mem_index, op2_offset +
                                        offsetof(XMMReg, XMM_Q(0)));
                        break;
                    case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
                    case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_L(0)));
                        break;
                    case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
                        tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_W(0)));
                        break;
                    case 0x2a: /* movntdqa */
                        gen_ldo_env_A0(s->mem_index, op1_offset);
                        return;
                    default:
                        gen_ldo_env_A0(s->mem_index, op2_offset);
                    }
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            if (sse_op2 == SSE_SPECIAL)
                goto illegal_op;

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);

            /* ptest sets EFLAGS */
            if (b == 0x17)
                s->cc_op = CC_OP_EFLAGS;
            break;
        case 0x338: /* crc32 */
        crc32:
            b = modrm;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;

            if (b != 0xf0 && b != 0xf1)
                goto illegal_op;
            if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
                goto illegal_op;

            if (b == 0xf0)
                ot = OT_BYTE;
            else if (b == 0xf1 && s->dflag != 2)
                if (s->prefix & PREFIX_DATA)
                    ot = OT_WORD;
                else
                    ot = OT_LONG;
            else
                ot = OT_QUAD;

            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
                               cpu_T[0], tcg_const_i32(8 << ot));

            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x03a:
        case 0x13a:
            /* three-byte 0x0f 0x3a map: the current modrm byte is
               really the third opcode byte */
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table7[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
                goto illegal_op;

            if (sse_op2 == SSE_SPECIAL) {
                /* SSE4.1 insert/extract group, hand decoded */
                ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3)
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                reg = ((modrm >> 3) & 7) | rex_r;
                val = ldub_code(s->pc++);
                switch (b) {
                case 0x14: /* pextrb */
                    tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x15: /* pextrw */
                    tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_W(val & 7)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x16:
                    if (ot == OT_LONG) { /* pextrd */
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
                        else
                            tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                    } else { /* pextrq */
                        tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
                        else
                            tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                    }
                    break;
                case 0x17: /* extractps */
                    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_L(val & 3)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x20: /* pinsrb */
                    if (mod == 3)
                        gen_op_mov_TN_reg(OT_LONG, 0, rm);
                    else
                        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    break;
                case 0x21: /* insertps */
                    if (mod == 3)
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,xmm_regs[rm]
                                                .XMM_L((val >> 6) & 3)));
                    else
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                    offsetof(CPUX86State,xmm_regs[reg]
                                            .XMM_L((val >> 4) & 3)));
                    /* the low imm8 bits are the zero mask for the dwords */
                    if ((val >> 0) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(0)));
                    if ((val >> 1) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(1)));
                    if ((val >> 2) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(2)));
                    if ((val >> 3) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(3)));
                    break;
                case 0x22:
                    if (ot == OT_LONG) { /* pinsrd */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
                        else
                            tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                    } else { /* pinsrq */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
                        else
                            tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                        tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                    }
                    break;
                }
                return;
            }

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            val = ldub_code(s->pc++);

            if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
                s->cc_op = CC_OP_EFLAGS;

                if (s->dflag == 2)
                    /* The helper must use entire 64-bit gp registers */
                    val |= 1 << 8;
            }

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these carry a trailing imm8; account for it when
               computing rip-relative addresses */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        /* comiss/comisd/ucomiss/ucomisd write EFLAGS */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
4390
4391#ifdef VBOX
4392/* Checks if it's an invalid lock sequence. Only a few instructions
4393 can be used together with the lock prefix and of those only the
4394 form that write a memory operand. So, this is kind of annoying
4395 work to do...
4396 The AMD manual lists the following instructions.
4397 ADC
4398 ADD
4399 AND
4400 BTC
4401 BTR
4402 BTS
4403 CMPXCHG
4404 CMPXCHG8B
4405 CMPXCHG16B
4406 DEC
4407 INC
4408 NEG
4409 NOT
4410 OR
4411 SBB
4412 SUB
4413 XADD
4414 XCHG
4415 XOR */
4416static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4417{
4418 target_ulong pc = s->pc;
4419 int modrm, mod, op;
4420
4421 /* X={8,16,32,64} Y={16,32,64} */
4422 switch (b)
4423 {
4424 /* /2: ADC reg/memX, immX */
4425 /* /0: ADD reg/memX, immX */
4426 /* /4: AND reg/memX, immX */
4427 /* /1: OR reg/memX, immX */
4428 /* /3: SBB reg/memX, immX */
4429 /* /5: SUB reg/memX, immX */
4430 /* /6: XOR reg/memX, immX */
4431 case 0x80:
4432 case 0x81:
4433 case 0x83:
4434 modrm = ldub_code(pc++);
4435 op = (modrm >> 3) & 7;
4436 if (op == 7) /* /7: CMP */
4437 break;
4438 mod = (modrm >> 6) & 3;
4439 if (mod == 3) /* register destination */
4440 break;
4441 return false;
4442
    case 0x10: /* /r: ADC reg/mem8, reg8 */
    case 0x11: /* /r: ADC reg/memX, regY */
    case 0x00: /* /r: ADD reg/mem8, reg8 */
    case 0x01: /* /r: ADD reg/memX, regY */
    case 0x20: /* /r: AND reg/mem8, reg8 */
    case 0x21: /* /r: AND reg/memY, regY */
    case 0x08: /* /r: OR reg/mem8, reg8 */
    case 0x09: /* /r: OR reg/memY, regY */
    case 0x18: /* /r: SBB reg/mem8, reg8 */
    case 0x19: /* /r: SBB reg/memY, regY */
    case 0x28: /* /r: SUB reg/mem8, reg8 */
    case 0x29: /* /r: SUB reg/memY, regY */
    case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
    case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
    case 0x30: /* /r: XOR reg/mem8, reg8 */
    case 0x31: /* /r: XOR reg/memY, regY */
        /* LOCK is only legal with a memory destination; a register
           destination (mod == 3) breaks out of the switch and falls
           through to the "illegal sequence" report below.  A memory
           destination returns false (= sequence is valid). */
        modrm = ldub_code(pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3) /* register destination */
            break;
        return false;

    /* /1: DEC reg/memX */
    /* /0: INC reg/memX */
    case 0xfe:
    case 0xff:
        modrm = ldub_code(pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3) /* register destination */
            break;
        return false;

    /* /3: NEG reg/memX */
    /* /2: NOT reg/memX */
    case 0xf6:
    case 0xf7:
        modrm = ldub_code(pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3) /* register destination */
            break;
        return false;

    case 0x0f:
        /* Two-byte opcode: only the bit-test / CMPXCHG / XADD family is
           lockable, and again only with a memory destination. */
        b = ldub_code(pc++);
        switch (b)
        {
            /* /7: BTC reg/memY, imm8 */
            /* /6: BTR reg/memY, imm8 */
            /* /5: BTS reg/memY, imm8 */
            case 0xba:
                modrm = ldub_code(pc++);
                op = (modrm >> 3) & 7;
                if (op < 5) /* /0../4 are not lockable (BT and reserved) */
                    break;
                mod = (modrm >> 6) & 3;
                if (mod == 3) /* register destination */
                    break;
                return false;

            case 0xbb: /* /r: BTC reg/memY, regY */
            case 0xb3: /* /r: BTR reg/memY, regY */
            case 0xab: /* /r: BTS reg/memY, regY */
            case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
            case 0xb1: /* /r: CMPXCHG reg/memY, regY */
            case 0xc0: /* /r: XADD reg/mem8, reg8 */
            case 0xc1: /* /r: XADD reg/memY, regY */
                modrm = ldub_code(pc++);
                mod = (modrm >> 6) & 3;
                if (mod == 3) /* register destination */
                    break;
                return false;

            /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
            case 0xc7:
                modrm = ldub_code(pc++);
                op = (modrm >> 3) & 7;
                if (op != 1) /* only /1 is defined for 0F C7 here */
                    break;
                return false;
        }
        break;
    }

    /* illegal sequence. The s->pc is past the lock prefix and that
       is sufficient for the TB, I think. */
    Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
    return true;
4530}
4531#endif /* VBOX */
4532
4533
/* Convert one guest instruction to TCG ops. s->is_jmp is set if the
   translation must be stopped. Returns the next pc value. */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int shift, ot;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
    int rex_w, rex_r;

    if (unlikely(loglevel & CPU_LOG_TB_OP))
        tcg_gen_debug_insn_start(pc_start);
    s->pc = pc_start;
    prefixes = 0;
    aflag = s->code32;  /* default address size follows CS.D */
    dflag = s->code32;  /* default operand size follows CS.D */
    s->override = -1;   /* no segment override prefix seen yet */
    rex_w = -1;         /* -1 = no REX prefix seen */
    rex_r = 0;
#ifdef TARGET_X86_64
    s->rex_x = 0;
    s->rex_b = 0;
    x86_64_hregs = 0;
#endif
    s->rip_offset = 0; /* for relative ip address */
#ifdef VBOX
    /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
    gen_update_eip(pc_start - s->cs_base);
#endif
 next_byte:
    b = ldub_code(s->pc);
    s->pc++;
    /* check prefixes: each prefix byte loops back to next_byte until the
       actual opcode is reached */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        case 0x40 ... 0x4f:
            /* REX prefix: unpack W/R/X/B bits, pre-shifted so they can be
               OR-ed directly into reg/rm/index numbers later */
            rex_w = (b >> 3) & 1;
            rex_r = (b & 0x4) << 1;
            s->rex_x = (b & 0x2) << 2;
            REX_B(s) = (b & 0x1) << 3;
            x86_64_hregs = 1; /* select uniform byte register addressing */
            goto next_byte;
        }
        if (rex_w == 1) {
            /* 0x66 is ignored if rex.w is set */
            dflag = 2;
        } else {
            if (prefixes & PREFIX_DATA)
                dflag ^= 1;
        }
        if (!(prefixes & PREFIX_ADR))
            aflag = 2; /* 64 bit addressing is the long mode default */
    } else
#endif
    {
        /* 16/32 bit modes: same prefix set without REX */
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        }
        /* 0x66/0x67 toggle away from the CS.D default */
        if (prefixes & PREFIX_DATA)
            dflag ^= 1;
        if (prefixes & PREFIX_ADR)
            aflag ^= 1;
    }

    s->prefix = prefixes;
    s->aflag = aflag;
    s->dflag = dflag;

    /* lock generation */
#ifndef VBOX
    if (prefixes & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_lock);
#else /* VBOX */
    if (prefixes & PREFIX_LOCK) {
        /* VBox addition: reject LOCK on non-lockable opcode/operand
           combinations with #UD before emitting any ops (the non-VBOX
           path above just takes the lock unconditionally). */
        if (is_invalid_lock_sequence(s, pc_start, b)) {
            gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
            return s->pc;
        }
        tcg_gen_helper_0_0(helper_lock);
    }
#endif /* VBOX */
4682
4683 /* now check op code */
4684 reswitch:
4685 switch(b) {
4686 case 0x0f:
4687 /**************************/
4688 /* extended op code */
4689 b = ldub_code(s->pc++) | 0x100;
4690 goto reswitch;
4691
4692 /**************************/
4693 /* arith & logic */
4694 case 0x00 ... 0x05:
4695 case 0x08 ... 0x0d:
4696 case 0x10 ... 0x15:
4697 case 0x18 ... 0x1d:
4698 case 0x20 ... 0x25:
4699 case 0x28 ... 0x2d:
4700 case 0x30 ... 0x35:
4701 case 0x38 ... 0x3d:
4702 {
4703 int op, f, val;
4704 op = (b >> 3) & 7;
4705 f = (b >> 1) & 3;
4706
4707 if ((b & 1) == 0)
4708 ot = OT_BYTE;
4709 else
4710 ot = dflag + OT_WORD;
4711
4712 switch(f) {
4713 case 0: /* OP Ev, Gv */
4714 modrm = ldub_code(s->pc++);
4715 reg = ((modrm >> 3) & 7) | rex_r;
4716 mod = (modrm >> 6) & 3;
4717 rm = (modrm & 7) | REX_B(s);
4718 if (mod != 3) {
4719 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4720 opreg = OR_TMP0;
4721 } else if (op == OP_XORL && rm == reg) {
4722 xor_zero:
4723 /* xor reg, reg optimisation */
4724 gen_op_movl_T0_0();
4725 s->cc_op = CC_OP_LOGICB + ot;
4726 gen_op_mov_reg_T0(ot, reg);
4727 gen_op_update1_cc();
4728 break;
4729 } else {
4730 opreg = rm;
4731 }
4732 gen_op_mov_TN_reg(ot, 1, reg);
4733 gen_op(s, op, ot, opreg);
4734 break;
4735 case 1: /* OP Gv, Ev */
4736 modrm = ldub_code(s->pc++);
4737 mod = (modrm >> 6) & 3;
4738 reg = ((modrm >> 3) & 7) | rex_r;
4739 rm = (modrm & 7) | REX_B(s);
4740 if (mod != 3) {
4741 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4742 gen_op_ld_T1_A0(ot + s->mem_index);
4743 } else if (op == OP_XORL && rm == reg) {
4744 goto xor_zero;
4745 } else {
4746 gen_op_mov_TN_reg(ot, 1, rm);
4747 }
4748 gen_op(s, op, ot, reg);
4749 break;
4750 case 2: /* OP A, Iv */
4751 val = insn_get(s, ot);
4752 gen_op_movl_T1_im(val);
4753 gen_op(s, op, ot, OR_EAX);
4754 break;
4755 }
4756 }
4757 break;
4758
4759 case 0x82:
4760 if (CODE64(s))
4761 goto illegal_op;
4762 case 0x80: /* GRP1 */
4763 case 0x81:
4764 case 0x83:
4765 {
4766 int val;
4767
4768 if ((b & 1) == 0)
4769 ot = OT_BYTE;
4770 else
4771 ot = dflag + OT_WORD;
4772
4773 modrm = ldub_code(s->pc++);
4774 mod = (modrm >> 6) & 3;
4775 rm = (modrm & 7) | REX_B(s);
4776 op = (modrm >> 3) & 7;
4777
4778 if (mod != 3) {
4779 if (b == 0x83)
4780 s->rip_offset = 1;
4781 else
4782 s->rip_offset = insn_const_size(ot);
4783 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4784 opreg = OR_TMP0;
4785 } else {
4786 opreg = rm;
4787 }
4788
4789 switch(b) {
4790 default:
4791 case 0x80:
4792 case 0x81:
4793 case 0x82:
4794 val = insn_get(s, ot);
4795 break;
4796 case 0x83:
4797 val = (int8_t)insn_get(s, OT_BYTE);
4798 break;
4799 }
4800 gen_op_movl_T1_im(val);
4801 gen_op(s, op, ot, opreg);
4802 }
4803 break;
4804
4805 /**************************/
4806 /* inc, dec, and other misc arith */
4807 case 0x40 ... 0x47: /* inc Gv */
4808 ot = dflag ? OT_LONG : OT_WORD;
4809 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4810 break;
4811 case 0x48 ... 0x4f: /* dec Gv */
4812 ot = dflag ? OT_LONG : OT_WORD;
4813 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4814 break;
4815 case 0xf6: /* GRP3 */
4816 case 0xf7:
4817 if ((b & 1) == 0)
4818 ot = OT_BYTE;
4819 else
4820 ot = dflag + OT_WORD;
4821
4822 modrm = ldub_code(s->pc++);
4823 mod = (modrm >> 6) & 3;
4824 rm = (modrm & 7) | REX_B(s);
4825 op = (modrm >> 3) & 7;
4826 if (mod != 3) {
4827 if (op == 0)
4828 s->rip_offset = insn_const_size(ot);
4829 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4830 gen_op_ld_T0_A0(ot + s->mem_index);
4831 } else {
4832 gen_op_mov_TN_reg(ot, 0, rm);
4833 }
4834
4835 switch(op) {
4836 case 0: /* test */
4837 val = insn_get(s, ot);
4838 gen_op_movl_T1_im(val);
4839 gen_op_testl_T0_T1_cc();
4840 s->cc_op = CC_OP_LOGICB + ot;
4841 break;
4842 case 2: /* not */
4843 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4844 if (mod != 3) {
4845 gen_op_st_T0_A0(ot + s->mem_index);
4846 } else {
4847 gen_op_mov_reg_T0(ot, rm);
4848 }
4849 break;
4850 case 3: /* neg */
4851 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4852 if (mod != 3) {
4853 gen_op_st_T0_A0(ot + s->mem_index);
4854 } else {
4855 gen_op_mov_reg_T0(ot, rm);
4856 }
4857 gen_op_update_neg_cc();
4858 s->cc_op = CC_OP_SUBB + ot;
4859 break;
4860 case 4: /* mul */
4861 switch(ot) {
4862 case OT_BYTE:
4863 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4864 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4865 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4866 /* XXX: use 32 bit mul which could be faster */
4867 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4868 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4869 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4870 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4871 s->cc_op = CC_OP_MULB;
4872 break;
4873 case OT_WORD:
4874 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4875 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4876 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4877 /* XXX: use 32 bit mul which could be faster */
4878 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4879 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4880 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4881 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4882 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4883 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4884 s->cc_op = CC_OP_MULW;
4885 break;
4886 default:
4887 case OT_LONG:
4888#ifdef TARGET_X86_64
4889 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4890 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4891 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4892 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4893 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4894 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4895 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4896 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4897 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4898#else
4899 {
4900 TCGv t0, t1;
4901 t0 = tcg_temp_new(TCG_TYPE_I64);
4902 t1 = tcg_temp_new(TCG_TYPE_I64);
4903 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4904 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4905 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4906 tcg_gen_mul_i64(t0, t0, t1);
4907 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4908 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4909 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4910 tcg_gen_shri_i64(t0, t0, 32);
4911 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4912 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4913 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4914 }
4915#endif
4916 s->cc_op = CC_OP_MULL;
4917 break;
4918#ifdef TARGET_X86_64
4919 case OT_QUAD:
4920 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4921 s->cc_op = CC_OP_MULQ;
4922 break;
4923#endif
4924 }
4925 break;
4926 case 5: /* imul */
4927 switch(ot) {
4928 case OT_BYTE:
4929 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4930 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4931 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4932 /* XXX: use 32 bit mul which could be faster */
4933 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4934 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4935 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4936 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4937 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4938 s->cc_op = CC_OP_MULB;
4939 break;
4940 case OT_WORD:
4941 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4942 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4943 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4944 /* XXX: use 32 bit mul which could be faster */
4945 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4946 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4947 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4948 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4949 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4950 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4951 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4952 s->cc_op = CC_OP_MULW;
4953 break;
4954 default:
4955 case OT_LONG:
4956#ifdef TARGET_X86_64
4957 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4958 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4959 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4960 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4961 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4962 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4963 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4964 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4965 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4966 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4967#else
4968 {
4969 TCGv t0, t1;
4970 t0 = tcg_temp_new(TCG_TYPE_I64);
4971 t1 = tcg_temp_new(TCG_TYPE_I64);
4972 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4973 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4974 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4975 tcg_gen_mul_i64(t0, t0, t1);
4976 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4977 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4978 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4979 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4980 tcg_gen_shri_i64(t0, t0, 32);
4981 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4982 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4983 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4984 }
4985#endif
4986 s->cc_op = CC_OP_MULL;
4987 break;
4988#ifdef TARGET_X86_64
4989 case OT_QUAD:
4990 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4991 s->cc_op = CC_OP_MULQ;
4992 break;
4993#endif
4994 }
4995 break;
4996 case 6: /* div */
4997 switch(ot) {
4998 case OT_BYTE:
4999 gen_jmp_im(pc_start - s->cs_base);
5000 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5001 break;
5002 case OT_WORD:
5003 gen_jmp_im(pc_start - s->cs_base);
5004 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5005 break;
5006 default:
5007 case OT_LONG:
5008 gen_jmp_im(pc_start - s->cs_base);
5009 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5010 break;
5011#ifdef TARGET_X86_64
5012 case OT_QUAD:
5013 gen_jmp_im(pc_start - s->cs_base);
5014 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5015 break;
5016#endif
5017 }
5018 break;
5019 case 7: /* idiv */
5020 switch(ot) {
5021 case OT_BYTE:
5022 gen_jmp_im(pc_start - s->cs_base);
5023 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5024 break;
5025 case OT_WORD:
5026 gen_jmp_im(pc_start - s->cs_base);
5027 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5028 break;
5029 default:
5030 case OT_LONG:
5031 gen_jmp_im(pc_start - s->cs_base);
5032 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5033 break;
5034#ifdef TARGET_X86_64
5035 case OT_QUAD:
5036 gen_jmp_im(pc_start - s->cs_base);
5037 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5038 break;
5039#endif
5040 }
5041 break;
5042 default:
5043 goto illegal_op;
5044 }
5045 break;
5046
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op; /* GRP4 only defines /0 (INC) and /1 (DEC) */
        }
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for far calls/jumps, the operand is 16 or 32 bit, even
                   in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* lcall/ljmp (/3, /5) load their far pointer themselves below */
            if (op >= 2 && op != 3 && op != 5)
                gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* inc Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, 1);
            break;
        case 1: /* dec Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, -1);
            break;
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
#ifdef VBOX_WITH_CALL_RECORD
            if (s->record_call)
                gen_op_record_call();
#endif
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* push the return address, then jump to T0 */
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
            gen_push_T1(s);
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 3: /* lcall Ev */
            /* load far pointer: T1 = offset, T0 = segment selector */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_lcall:
            if (s->pe && !s->vm86) {
                /* protected mode: the helper needs up-to-date flags/EIP
                   since it may fault */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_protected,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_4(helper_lcall_real,
                                   cpu_tmp2_i32, cpu_T[1],
                                   tcg_const_i32(dflag),
                                   tcg_const_i32(s->pc - s->cs_base));
            }
            gen_eob(s);
            break;
        case 4: /* jmp Ev */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 5: /* ljmp Ev */
            /* load far pointer: T1 = offset, T0 = segment selector */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_ljmp:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_3(helper_ljmp_protected,
                                   cpu_tmp2_i32,
                                   cpu_T[1],
                                   tcg_const_i32(s->pc - pc_start));
            } else {
                /* real/vm86 mode: set CS directly and jump */
                gen_op_movl_seg_T0_vm(R_CS);
                gen_op_movl_T0_T1();
                gen_op_jmp_T0();
            }
            gen_eob(s);
            break;
        case 6: /* push Ev */
            gen_push_T0(s);
            break;
        default:
            goto illegal_op;
        }
        break;
5168
5169 case 0x84: /* test Ev, Gv */
5170 case 0x85:
5171 if ((b & 1) == 0)
5172 ot = OT_BYTE;
5173 else
5174 ot = dflag + OT_WORD;
5175
5176 modrm = ldub_code(s->pc++);
5177 mod = (modrm >> 6) & 3;
5178 rm = (modrm & 7) | REX_B(s);
5179 reg = ((modrm >> 3) & 7) | rex_r;
5180
5181 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5182 gen_op_mov_TN_reg(ot, 1, reg);
5183 gen_op_testl_T0_T1_cc();
5184 s->cc_op = CC_OP_LOGICB + ot;
5185 break;
5186
5187 case 0xa8: /* test eAX, Iv */
5188 case 0xa9:
5189 if ((b & 1) == 0)
5190 ot = OT_BYTE;
5191 else
5192 ot = dflag + OT_WORD;
5193 val = insn_get(s, ot);
5194
5195 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5196 gen_op_movl_T1_im(val);
5197 gen_op_testl_T0_T1_cc();
5198 s->cc_op = CC_OP_LOGICB + ot;
5199 break;
5200
5201 case 0x98: /* CWDE/CBW */
5202#ifdef TARGET_X86_64
5203 if (dflag == 2) {
5204 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5205 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5206 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5207 } else
5208#endif
5209 if (dflag == 1) {
5210 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5211 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5212 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5213 } else {
5214 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5215 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5216 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5217 }
5218 break;
5219 case 0x99: /* CDQ/CWD */
5220#ifdef TARGET_X86_64
5221 if (dflag == 2) {
5222 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5223 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5224 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5225 } else
5226#endif
5227 if (dflag == 1) {
5228 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5229 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5230 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5231 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5232 } else {
5233 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5234 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5235 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5236 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5237 }
5238 break;
5239 case 0x1af: /* imul Gv, Ev */
5240 case 0x69: /* imul Gv, Ev, I */
5241 case 0x6b:
5242 ot = dflag + OT_WORD;
5243 modrm = ldub_code(s->pc++);
5244 reg = ((modrm >> 3) & 7) | rex_r;
5245 if (b == 0x69)
5246 s->rip_offset = insn_const_size(ot);
5247 else if (b == 0x6b)
5248 s->rip_offset = 1;
5249 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5250 if (b == 0x69) {
5251 val = insn_get(s, ot);
5252 gen_op_movl_T1_im(val);
5253 } else if (b == 0x6b) {
5254 val = (int8_t)insn_get(s, OT_BYTE);
5255 gen_op_movl_T1_im(val);
5256 } else {
5257 gen_op_mov_TN_reg(ot, 1, reg);
5258 }
5259
5260#ifdef TARGET_X86_64
5261 if (ot == OT_QUAD) {
5262 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5263 } else
5264#endif
5265 if (ot == OT_LONG) {
5266#ifdef TARGET_X86_64
5267 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5268 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5269 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5270 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5271 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5272 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5273#else
5274 {
5275 TCGv t0, t1;
5276 t0 = tcg_temp_new(TCG_TYPE_I64);
5277 t1 = tcg_temp_new(TCG_TYPE_I64);
5278 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5279 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5280 tcg_gen_mul_i64(t0, t0, t1);
5281 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5282 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5283 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5284 tcg_gen_shri_i64(t0, t0, 32);
5285 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5286 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5287 }
5288#endif
5289 } else {
5290 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5291 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5292 /* XXX: use 32 bit mul which could be faster */
5293 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5294 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5295 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5296 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5297 }
5298 gen_op_mov_reg_T0(ot, reg);
5299 s->cc_op = CC_OP_MULB + ot;
5300 break;
5301 case 0x1c0:
5302 case 0x1c1: /* xadd Ev, Gv */
5303 if ((b & 1) == 0)
5304 ot = OT_BYTE;
5305 else
5306 ot = dflag + OT_WORD;
5307 modrm = ldub_code(s->pc++);
5308 reg = ((modrm >> 3) & 7) | rex_r;
5309 mod = (modrm >> 6) & 3;
5310 if (mod == 3) {
5311 rm = (modrm & 7) | REX_B(s);
5312 gen_op_mov_TN_reg(ot, 0, reg);
5313 gen_op_mov_TN_reg(ot, 1, rm);
5314 gen_op_addl_T0_T1();
5315 gen_op_mov_reg_T1(ot, reg);
5316 gen_op_mov_reg_T0(ot, rm);
5317 } else {
5318 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5319 gen_op_mov_TN_reg(ot, 0, reg);
5320 gen_op_ld_T1_A0(ot + s->mem_index);
5321 gen_op_addl_T0_T1();
5322 gen_op_st_T0_A0(ot + s->mem_index);
5323 gen_op_mov_reg_T1(ot, reg);
5324 }
5325 gen_op_update2_cc();
5326 s->cc_op = CC_OP_ADDB + ot;
5327 break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        {
            int label1, label2;
            TCGv t0, t1, t2, a0;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            /* t0 = destination value (Ev), t1 = source (Gv),
               t2 = EAX - t0 (also becomes cc_dst), a0 = saved address */
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
            a0 = tcg_temp_local_new(TCG_TYPE_TL);
            gen_op_mov_v_reg(ot, t1, reg);
            if (mod == 3) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_mov_tl(a0, cpu_A0);
                gen_op_ld_v(ot + s->mem_index, t0, a0);
                rm = 0; /* avoid warning */
            }
            label1 = gen_new_label();
            tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
            tcg_gen_sub_tl(t2, t2, t0);
            gen_extu(ot, t2);
            /* t2 == 0 <=> accumulator equals the destination */
            tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
            if (mod == 3) {
                label2 = gen_new_label();
                /* not equal: EAX <- destination */
                gen_op_mov_reg_v(ot, R_EAX, t0);
                tcg_gen_br(label2);
                gen_set_label(label1);
                /* equal: destination <- source */
                gen_op_mov_reg_v(ot, rm, t1);
                gen_set_label(label2);
            } else {
                /* not equal: EAX <- old value and write the old value
                   back; equal: write the source.  The store happens in
                   both cases (architectural "always store" behaviour). */
                tcg_gen_mov_tl(t1, t0);
                gen_op_mov_reg_v(ot, R_EAX, t0);
                gen_set_label(label1);
                /* always store */
                gen_op_st_v(ot + s->mem_index, t1, a0);
            }
            /* flags as for CMP EAX, dest */
            tcg_gen_mov_tl(cpu_cc_src, t0);
            tcg_gen_mov_tl(cpu_cc_dst, t2);
            s->cc_op = CC_OP_SUBB + ot;
            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
            tcg_temp_free(a0);
        }
        break;
5383 case 0x1c7: /* cmpxchg8b */
5384 modrm = ldub_code(s->pc++);
5385 mod = (modrm >> 6) & 3;
5386 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5387 goto illegal_op;
5388#ifdef TARGET_X86_64
5389 if (dflag == 2) {
5390 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5391 goto illegal_op;
5392 gen_jmp_im(pc_start - s->cs_base);
5393 if (s->cc_op != CC_OP_DYNAMIC)
5394 gen_op_set_cc_op(s->cc_op);
5395 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5396 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5397 } else
5398#endif
5399 {
5400 if (!(s->cpuid_features & CPUID_CX8))
5401 goto illegal_op;
5402 gen_jmp_im(pc_start - s->cs_base);
5403 if (s->cc_op != CC_OP_DYNAMIC)
5404 gen_op_set_cc_op(s->cc_op);
5405 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5406 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5407 }
5408 s->cc_op = CC_OP_EFLAGS;
5409 break;
5410
5411 /**************************/
5412 /* push/pop */
5413 case 0x50 ... 0x57: /* push */
5414 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5415 gen_push_T0(s);
5416 break;
5417 case 0x58 ... 0x5f: /* pop */
5418 if (CODE64(s)) {
5419 ot = dflag ? OT_QUAD : OT_WORD;
5420 } else {
5421 ot = dflag + OT_WORD;
5422 }
5423 gen_pop_T0(s);
5424 /* NOTE: order is important for pop %sp */
5425 gen_pop_update(s);
5426 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5427 break;
5428 case 0x60: /* pusha */
5429 if (CODE64(s))
5430 goto illegal_op;
5431 gen_pusha(s);
5432 break;
5433 case 0x61: /* popa */
5434 if (CODE64(s))
5435 goto illegal_op;
5436 gen_popa(s);
5437 break;
5438 case 0x68: /* push Iv */
5439 case 0x6a:
5440 if (CODE64(s)) {
5441 ot = dflag ? OT_QUAD : OT_WORD;
5442 } else {
5443 ot = dflag + OT_WORD;
5444 }
5445 if (b == 0x68)
5446 val = insn_get(s, ot);
5447 else
5448 val = (int8_t)insn_get(s, OT_BYTE);
5449 gen_op_movl_T0_im(val);
5450 gen_push_T0(s);
5451 break;
5452 case 0x8f: /* pop Ev */
5453 if (CODE64(s)) {
5454 ot = dflag ? OT_QUAD : OT_WORD;
5455 } else {
5456 ot = dflag + OT_WORD;
5457 }
5458 modrm = ldub_code(s->pc++);
5459 mod = (modrm >> 6) & 3;
5460 gen_pop_T0(s);
5461 if (mod == 3) {
5462 /* NOTE: order is important for pop %sp */
5463 gen_pop_update(s);
5464 rm = (modrm & 7) | REX_B(s);
5465 gen_op_mov_reg_T0(ot, rm);
5466 } else {
5467 /* NOTE: order is important too for MMU exceptions */
5468 s->popl_esp_hack = 1 << ot;
5469 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5470 s->popl_esp_hack = 0;
5471 gen_pop_update(s);
5472 }
5473 break;
    case 0xc8: /* enter */
        {
            int level;
            /* immediate operands: 16-bit frame size, 8-bit nesting level */
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        /* first ESP <- EBP, width chosen by mode / SS.B */
        if (CODE64(s)) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        }
        /* then pop the saved EBP */
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0(ot, R_EBP);
        gen_pop_update(s);
        break;
5504 case 0x06: /* push es */
5505 case 0x0e: /* push cs */
5506 case 0x16: /* push ss */
5507 case 0x1e: /* push ds */
5508 if (CODE64(s))
5509 goto illegal_op;
5510 gen_op_movl_T0_seg(b >> 3);
5511 gen_push_T0(s);
5512 break;
5513 case 0x1a0: /* push fs */
5514 case 0x1a8: /* push gs */
5515 gen_op_movl_T0_seg((b >> 3) & 7);
5516 gen_push_T0(s);
5517 break;
5518 case 0x07: /* pop es */
5519 case 0x17: /* pop ss */
5520 case 0x1f: /* pop ds */
5521 if (CODE64(s))
5522 goto illegal_op;
5523 reg = b >> 3;
5524 gen_pop_T0(s);
5525 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5526 gen_pop_update(s);
5527 if (reg == R_SS) {
5528 /* if reg == SS, inhibit interrupts/trace. */
5529 /* If several instructions disable interrupts, only the
5530 _first_ does it */
5531 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5532 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5533 s->tf = 0;
5534 }
5535 if (s->is_jmp) {
5536 gen_jmp_im(s->pc - s->cs_base);
5537 gen_eob(s);
5538 }
5539 break;
5540 case 0x1a1: /* pop fs */
5541 case 0x1a9: /* pop gs */
5542 gen_pop_T0(s);
5543 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5544 gen_pop_update(s);
5545 if (s->is_jmp) {
5546 gen_jmp_im(s->pc - s->cs_base);
5547 gen_eob(s);
5548 }
5549 break;
5550
5551 /**************************/
5552 /* mov */
5553 case 0x88:
5554 case 0x89: /* mov Gv, Ev */
5555 if ((b & 1) == 0)
5556 ot = OT_BYTE;
5557 else
5558 ot = dflag + OT_WORD;
5559 modrm = ldub_code(s->pc++);
5560 reg = ((modrm >> 3) & 7) | rex_r;
5561
5562 /* generate a generic store */
5563 gen_ldst_modrm(s, modrm, ot, reg, 1);
5564 break;
5565 case 0xc6:
5566 case 0xc7: /* mov Ev, Iv */
5567 if ((b & 1) == 0)
5568 ot = OT_BYTE;
5569 else
5570 ot = dflag + OT_WORD;
5571 modrm = ldub_code(s->pc++);
5572 mod = (modrm >> 6) & 3;
5573 if (mod != 3) {
5574 s->rip_offset = insn_const_size(ot);
5575 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5576 }
5577 val = insn_get(s, ot);
5578 gen_op_movl_T0_im(val);
5579 if (mod != 3)
5580 gen_op_st_T0_A0(ot + s->mem_index);
5581 else
5582 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5583 break;
5584 case 0x8a:
5585 case 0x8b: /* mov Ev, Gv */
5586#ifdef VBOX /* dtrace hot fix */
5587 if (prefixes & PREFIX_LOCK)
5588 goto illegal_op;
5589#endif
5590 if ((b & 1) == 0)
5591 ot = OT_BYTE;
5592 else
5593 ot = OT_WORD + dflag;
5594 modrm = ldub_code(s->pc++);
5595 reg = ((modrm >> 3) & 7) | rex_r;
5596
5597 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5598 gen_op_mov_reg_T0(ot, reg);
5599 break;
5600 case 0x8e: /* mov seg, Gv */
5601 modrm = ldub_code(s->pc++);
5602 reg = (modrm >> 3) & 7;
5603 if (reg >= 6 || reg == R_CS)
5604 goto illegal_op;
5605 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5606 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5607 if (reg == R_SS) {
5608 /* if reg == SS, inhibit interrupts/trace */
5609 /* If several instructions disable interrupts, only the
5610 _first_ does it */
5611 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5612 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5613 s->tf = 0;
5614 }
5615 if (s->is_jmp) {
5616 gen_jmp_im(s->pc - s->cs_base);
5617 gen_eob(s);
5618 }
5619 break;
5620 case 0x8c: /* mov Gv, seg */
5621 modrm = ldub_code(s->pc++);
5622 reg = (modrm >> 3) & 7;
5623 mod = (modrm >> 6) & 3;
5624 if (reg >= 6)
5625 goto illegal_op;
5626 gen_op_movl_T0_seg(reg);
5627 if (mod == 3)
5628 ot = OT_WORD + dflag;
5629 else
5630 ot = OT_WORD;
5631 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5632 break;
5633
5634 case 0x1b6: /* movzbS Gv, Eb */
5635 case 0x1b7: /* movzwS Gv, Eb */
5636 case 0x1be: /* movsbS Gv, Eb */
5637 case 0x1bf: /* movswS Gv, Eb */
5638 {
5639 int d_ot;
5640 /* d_ot is the size of destination */
5641 d_ot = dflag + OT_WORD;
5642 /* ot is the size of source */
5643 ot = (b & 1) + OT_BYTE;
5644 modrm = ldub_code(s->pc++);
5645 reg = ((modrm >> 3) & 7) | rex_r;
5646 mod = (modrm >> 6) & 3;
5647 rm = (modrm & 7) | REX_B(s);
5648
5649 if (mod == 3) {
5650 gen_op_mov_TN_reg(ot, 0, rm);
5651 switch(ot | (b & 8)) {
5652 case OT_BYTE:
5653 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5654 break;
5655 case OT_BYTE | 8:
5656 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5657 break;
5658 case OT_WORD:
5659 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5660 break;
5661 default:
5662 case OT_WORD | 8:
5663 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5664 break;
5665 }
5666 gen_op_mov_reg_T0(d_ot, reg);
5667 } else {
5668 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5669 if (b & 8) {
5670 gen_op_lds_T0_A0(ot + s->mem_index);
5671 } else {
5672 gen_op_ldu_T0_A0(ot + s->mem_index);
5673 }
5674 gen_op_mov_reg_T0(d_ot, reg);
5675 }
5676 }
5677 break;
5678
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3) /* LEA requires a memory operand */
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added: LEA yields the plain
           effective address, so drop any override and temporarily
           disable segment-base addition */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val; /* restore the previous addseg setting */
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;
5694
5695 case 0xa0: /* mov EAX, Ov */
5696 case 0xa1:
5697 case 0xa2: /* mov Ov, EAX */
5698 case 0xa3:
5699 {
5700 target_ulong offset_addr;
5701
5702 if ((b & 1) == 0)
5703 ot = OT_BYTE;
5704 else
5705 ot = dflag + OT_WORD;
5706#ifdef TARGET_X86_64
5707 if (s->aflag == 2) {
5708 offset_addr = ldq_code(s->pc);
5709 s->pc += 8;
5710 gen_op_movq_A0_im(offset_addr);
5711 } else
5712#endif
5713 {
5714 if (s->aflag) {
5715 offset_addr = insn_get(s, OT_LONG);
5716 } else {
5717 offset_addr = insn_get(s, OT_WORD);
5718 }
5719 gen_op_movl_A0_im(offset_addr);
5720 }
5721 gen_add_A0_ds_seg(s);
5722 if ((b & 2) == 0) {
5723 gen_op_ld_T0_A0(ot + s->mem_index);
5724 gen_op_mov_reg_T0(ot, R_EAX);
5725 } else {
5726 gen_op_mov_TN_reg(ot, 0, R_EAX);
5727 gen_op_st_T0_A0(ot + s->mem_index);
5728 }
5729 }
5730 break;
5731 case 0xd7: /* xlat */
5732#ifdef TARGET_X86_64
5733 if (s->aflag == 2) {
5734 gen_op_movq_A0_reg(R_EBX);
5735 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5736 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5737 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5738 } else
5739#endif
5740 {
5741 gen_op_movl_A0_reg(R_EBX);
5742 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5743 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5744 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5745 if (s->aflag == 0)
5746 gen_op_andl_A0_ffff();
5747 else
5748 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5749 }
5750 gen_add_A0_ds_seg(s);
5751 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5752 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5753 break;
5754 case 0xb0 ... 0xb7: /* mov R, Ib */
5755 val = insn_get(s, OT_BYTE);
5756 gen_op_movl_T0_im(val);
5757 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5758 break;
5759 case 0xb8 ... 0xbf: /* mov R, Iv */
5760#ifdef TARGET_X86_64
5761 if (dflag == 2) {
5762 uint64_t tmp;
5763 /* 64 bit case */
5764 tmp = ldq_code(s->pc);
5765 s->pc += 8;
5766 reg = (b & 7) | REX_B(s);
5767 gen_movtl_T0_im(tmp);
5768 gen_op_mov_reg_T0(OT_QUAD, reg);
5769 } else
5770#endif
5771 {
5772 ot = dflag ? OT_LONG : OT_WORD;
5773 val = insn_get(s, ot);
5774 reg = (b & 7) | REX_B(s);
5775 gen_op_movl_T0_im(val);
5776 gen_op_mov_reg_T0(ot, reg);
5777 }
5778 break;
5779
5780 case 0x91 ... 0x97: /* xchg R, EAX */
5781 ot = dflag + OT_WORD;
5782 reg = (b & 7) | REX_B(s);
5783 rm = R_EAX;
5784 goto do_xchg_reg;
5785 case 0x86:
5786 case 0x87: /* xchg Ev, Gv */
5787 if ((b & 1) == 0)
5788 ot = OT_BYTE;
5789 else
5790 ot = dflag + OT_WORD;
5791 modrm = ldub_code(s->pc++);
5792 reg = ((modrm >> 3) & 7) | rex_r;
5793 mod = (modrm >> 6) & 3;
5794 if (mod == 3) {
5795 rm = (modrm & 7) | REX_B(s);
5796 do_xchg_reg:
5797 gen_op_mov_TN_reg(ot, 0, reg);
5798 gen_op_mov_TN_reg(ot, 1, rm);
5799 gen_op_mov_reg_T0(ot, rm);
5800 gen_op_mov_reg_T1(ot, reg);
5801 } else {
5802 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5803 gen_op_mov_TN_reg(ot, 0, reg);
5804 /* for xchg, lock is implicit */
5805 if (!(prefixes & PREFIX_LOCK))
5806 tcg_gen_helper_0_0(helper_lock);
5807 gen_op_ld_T1_A0(ot + s->mem_index);
5808 gen_op_st_T0_A0(ot + s->mem_index);
5809 if (!(prefixes & PREFIX_LOCK))
5810 tcg_gen_helper_0_0(helper_unlock);
5811 gen_op_mov_reg_T1(ot, reg);
5812 }
5813 break;
5814 case 0xc4: /* les Gv */
5815 if (CODE64(s))
5816 goto illegal_op;
5817 op = R_ES;
5818 goto do_lxx;
5819 case 0xc5: /* lds Gv */
5820 if (CODE64(s))
5821 goto illegal_op;
5822 op = R_DS;
5823 goto do_lxx;
5824 case 0x1b2: /* lss Gv */
5825 op = R_SS;
5826 goto do_lxx;
5827 case 0x1b4: /* lfs Gv */
5828 op = R_FS;
5829 goto do_lxx;
5830 case 0x1b5: /* lgs Gv */
5831 op = R_GS;
5832 do_lxx:
5833 ot = dflag ? OT_LONG : OT_WORD;
5834 modrm = ldub_code(s->pc++);
5835 reg = ((modrm >> 3) & 7) | rex_r;
5836 mod = (modrm >> 6) & 3;
5837 if (mod == 3)
5838 goto illegal_op;
5839 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5840 gen_op_ld_T1_A0(ot + s->mem_index);
5841 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5842 /* load the segment first to handle exceptions properly */
5843 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5844 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5845 /* then put the data */
5846 gen_op_mov_reg_T1(ot, reg);
5847 if (s->is_jmp) {
5848 gen_jmp_im(s->pc - s->cs_base);
5849 gen_eob(s);
5850 }
5851 break;
5852
5853 /************************/
5854 /* shifts */
5855 case 0xc0:
5856 case 0xc1:
5857 /* shift Ev,Ib */
5858 shift = 2;
5859 grp2:
5860 {
5861 if ((b & 1) == 0)
5862 ot = OT_BYTE;
5863 else
5864 ot = dflag + OT_WORD;
5865
5866 modrm = ldub_code(s->pc++);
5867 mod = (modrm >> 6) & 3;
5868 op = (modrm >> 3) & 7;
5869
5870 if (mod != 3) {
5871 if (shift == 2) {
5872 s->rip_offset = 1;
5873 }
5874 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5875 opreg = OR_TMP0;
5876 } else {
5877 opreg = (modrm & 7) | REX_B(s);
5878 }
5879
5880 /* simpler op */
5881 if (shift == 0) {
5882 gen_shift(s, op, ot, opreg, OR_ECX);
5883 } else {
5884 if (shift == 2) {
5885 shift = ldub_code(s->pc++);
5886 }
5887 gen_shifti(s, op, ot, opreg, shift);
5888 }
5889 }
5890 break;
5891 case 0xd0:
5892 case 0xd1:
5893 /* shift Ev,1 */
5894 shift = 1;
5895 goto grp2;
5896 case 0xd2:
5897 case 0xd3:
5898 /* shift Ev,cl */
5899 shift = 0;
5900 goto grp2;
5901
5902 case 0x1a4: /* shld imm */
5903 op = 0;
5904 shift = 1;
5905 goto do_shiftd;
5906 case 0x1a5: /* shld cl */
5907 op = 0;
5908 shift = 0;
5909 goto do_shiftd;
5910 case 0x1ac: /* shrd imm */
5911 op = 1;
5912 shift = 1;
5913 goto do_shiftd;
5914 case 0x1ad: /* shrd cl */
5915 op = 1;
5916 shift = 0;
5917 do_shiftd:
5918 ot = dflag + OT_WORD;
5919 modrm = ldub_code(s->pc++);
5920 mod = (modrm >> 6) & 3;
5921 rm = (modrm & 7) | REX_B(s);
5922 reg = ((modrm >> 3) & 7) | rex_r;
5923 if (mod != 3) {
5924 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5925 opreg = OR_TMP0;
5926 } else {
5927 opreg = rm;
5928 }
5929 gen_op_mov_TN_reg(ot, 1, reg);
5930
5931 if (shift) {
5932 val = ldub_code(s->pc++);
5933 tcg_gen_movi_tl(cpu_T3, val);
5934 } else {
5935 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5936 }
5937 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5938 break;
5939
5940 /************************/
5941 /* floats */
5942 case 0xd8 ... 0xdf:
5943 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5944 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5945 /* XXX: what to do if illegal op ? */
5946 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5947 break;
5948 }
5949 modrm = ldub_code(s->pc++);
5950 mod = (modrm >> 6) & 3;
5951 rm = modrm & 7;
5952 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
5953 if (mod != 3) {
5954 /* memory op */
5955 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5956 switch(op) {
5957 case 0x00 ... 0x07: /* fxxxs */
5958 case 0x10 ... 0x17: /* fixxxl */
5959 case 0x20 ... 0x27: /* fxxxl */
5960 case 0x30 ... 0x37: /* fixxx */
5961 {
5962 int op1;
5963 op1 = op & 7;
5964
5965 switch(op >> 4) {
5966 case 0:
5967 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5968 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5969 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5970 break;
5971 case 1:
5972 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5973 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5974 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5975 break;
5976 case 2:
5977 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5978 (s->mem_index >> 2) - 1);
5979 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5980 break;
5981 case 3:
5982 default:
5983 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5984 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5985 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5986 break;
5987 }
5988
5989 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5990 if (op1 == 3) {
5991 /* fcomp needs pop */
5992 tcg_gen_helper_0_0(helper_fpop);
5993 }
5994 }
5995 break;
5996 case 0x08: /* flds */
5997 case 0x0a: /* fsts */
5998 case 0x0b: /* fstps */
5999 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6000 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6001 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6002 switch(op & 7) {
6003 case 0:
6004 switch(op >> 4) {
6005 case 0:
6006 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6007 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6008 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6009 break;
6010 case 1:
6011 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6012 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6013 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6014 break;
6015 case 2:
6016 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6017 (s->mem_index >> 2) - 1);
6018 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6019 break;
6020 case 3:
6021 default:
6022 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6023 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6024 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6025 break;
6026 }
6027 break;
6028 case 1:
6029 /* XXX: the corresponding CPUID bit must be tested ! */
6030 switch(op >> 4) {
6031 case 1:
6032 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6033 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6034 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6035 break;
6036 case 2:
6037 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6038 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6039 (s->mem_index >> 2) - 1);
6040 break;
6041 case 3:
6042 default:
6043 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6044 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6045 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6046 break;
6047 }
6048 tcg_gen_helper_0_0(helper_fpop);
6049 break;
6050 default:
6051 switch(op >> 4) {
6052 case 0:
6053 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6054 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6055 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6056 break;
6057 case 1:
6058 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6059 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6060 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6061 break;
6062 case 2:
6063 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6064 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6065 (s->mem_index >> 2) - 1);
6066 break;
6067 case 3:
6068 default:
6069 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6070 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6071 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6072 break;
6073 }
6074 if ((op & 7) == 3)
6075 tcg_gen_helper_0_0(helper_fpop);
6076 break;
6077 }
6078 break;
6079 case 0x0c: /* fldenv mem */
6080 if (s->cc_op != CC_OP_DYNAMIC)
6081 gen_op_set_cc_op(s->cc_op);
6082 gen_jmp_im(pc_start - s->cs_base);
6083 tcg_gen_helper_0_2(helper_fldenv,
6084 cpu_A0, tcg_const_i32(s->dflag));
6085 break;
6086 case 0x0d: /* fldcw mem */
6087 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6088 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6089 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6090 break;
6091 case 0x0e: /* fnstenv mem */
6092 if (s->cc_op != CC_OP_DYNAMIC)
6093 gen_op_set_cc_op(s->cc_op);
6094 gen_jmp_im(pc_start - s->cs_base);
6095 tcg_gen_helper_0_2(helper_fstenv,
6096 cpu_A0, tcg_const_i32(s->dflag));
6097 break;
6098 case 0x0f: /* fnstcw mem */
6099 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6100 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6101 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6102 break;
6103 case 0x1d: /* fldt mem */
6104 if (s->cc_op != CC_OP_DYNAMIC)
6105 gen_op_set_cc_op(s->cc_op);
6106 gen_jmp_im(pc_start - s->cs_base);
6107 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6108 break;
6109 case 0x1f: /* fstpt mem */
6110 if (s->cc_op != CC_OP_DYNAMIC)
6111 gen_op_set_cc_op(s->cc_op);
6112 gen_jmp_im(pc_start - s->cs_base);
6113 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6114 tcg_gen_helper_0_0(helper_fpop);
6115 break;
6116 case 0x2c: /* frstor mem */
6117 if (s->cc_op != CC_OP_DYNAMIC)
6118 gen_op_set_cc_op(s->cc_op);
6119 gen_jmp_im(pc_start - s->cs_base);
6120 tcg_gen_helper_0_2(helper_frstor,
6121 cpu_A0, tcg_const_i32(s->dflag));
6122 break;
6123 case 0x2e: /* fnsave mem */
6124 if (s->cc_op != CC_OP_DYNAMIC)
6125 gen_op_set_cc_op(s->cc_op);
6126 gen_jmp_im(pc_start - s->cs_base);
6127 tcg_gen_helper_0_2(helper_fsave,
6128 cpu_A0, tcg_const_i32(s->dflag));
6129 break;
6130 case 0x2f: /* fnstsw mem */
6131 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6132 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6133 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6134 break;
6135 case 0x3c: /* fbld */
6136 if (s->cc_op != CC_OP_DYNAMIC)
6137 gen_op_set_cc_op(s->cc_op);
6138 gen_jmp_im(pc_start - s->cs_base);
6139 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6140 break;
6141 case 0x3e: /* fbstp */
6142 if (s->cc_op != CC_OP_DYNAMIC)
6143 gen_op_set_cc_op(s->cc_op);
6144 gen_jmp_im(pc_start - s->cs_base);
6145 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6146 tcg_gen_helper_0_0(helper_fpop);
6147 break;
6148 case 0x3d: /* fildll */
6149 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6150 (s->mem_index >> 2) - 1);
6151 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6152 break;
6153 case 0x3f: /* fistpll */
6154 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6155 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6156 (s->mem_index >> 2) - 1);
6157 tcg_gen_helper_0_0(helper_fpop);
6158 break;
6159 default:
6160 goto illegal_op;
6161 }
6162 } else {
6163 /* register float ops */
6164 opreg = rm;
6165
6166 switch(op) {
6167 case 0x08: /* fld sti */
6168 tcg_gen_helper_0_0(helper_fpush);
6169 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6170 break;
6171 case 0x09: /* fxchg sti */
6172 case 0x29: /* fxchg4 sti, undocumented op */
6173 case 0x39: /* fxchg7 sti, undocumented op */
6174 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6175 break;
6176 case 0x0a: /* grp d9/2 */
6177 switch(rm) {
6178 case 0: /* fnop */
6179 /* check exceptions (FreeBSD FPU probe) */
6180 if (s->cc_op != CC_OP_DYNAMIC)
6181 gen_op_set_cc_op(s->cc_op);
6182 gen_jmp_im(pc_start - s->cs_base);
6183 tcg_gen_helper_0_0(helper_fwait);
6184 break;
6185 default:
6186 goto illegal_op;
6187 }
6188 break;
6189 case 0x0c: /* grp d9/4 */
6190 switch(rm) {
6191 case 0: /* fchs */
6192 tcg_gen_helper_0_0(helper_fchs_ST0);
6193 break;
6194 case 1: /* fabs */
6195 tcg_gen_helper_0_0(helper_fabs_ST0);
6196 break;
6197 case 4: /* ftst */
6198 tcg_gen_helper_0_0(helper_fldz_FT0);
6199 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6200 break;
6201 case 5: /* fxam */
6202 tcg_gen_helper_0_0(helper_fxam_ST0);
6203 break;
6204 default:
6205 goto illegal_op;
6206 }
6207 break;
6208 case 0x0d: /* grp d9/5 */
6209 {
6210 switch(rm) {
6211 case 0:
6212 tcg_gen_helper_0_0(helper_fpush);
6213 tcg_gen_helper_0_0(helper_fld1_ST0);
6214 break;
6215 case 1:
6216 tcg_gen_helper_0_0(helper_fpush);
6217 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6218 break;
6219 case 2:
6220 tcg_gen_helper_0_0(helper_fpush);
6221 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6222 break;
6223 case 3:
6224 tcg_gen_helper_0_0(helper_fpush);
6225 tcg_gen_helper_0_0(helper_fldpi_ST0);
6226 break;
6227 case 4:
6228 tcg_gen_helper_0_0(helper_fpush);
6229 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6230 break;
6231 case 5:
6232 tcg_gen_helper_0_0(helper_fpush);
6233 tcg_gen_helper_0_0(helper_fldln2_ST0);
6234 break;
6235 case 6:
6236 tcg_gen_helper_0_0(helper_fpush);
6237 tcg_gen_helper_0_0(helper_fldz_ST0);
6238 break;
6239 default:
6240 goto illegal_op;
6241 }
6242 }
6243 break;
6244 case 0x0e: /* grp d9/6 */
6245 switch(rm) {
6246 case 0: /* f2xm1 */
6247 tcg_gen_helper_0_0(helper_f2xm1);
6248 break;
6249 case 1: /* fyl2x */
6250 tcg_gen_helper_0_0(helper_fyl2x);
6251 break;
6252 case 2: /* fptan */
6253 tcg_gen_helper_0_0(helper_fptan);
6254 break;
6255 case 3: /* fpatan */
6256 tcg_gen_helper_0_0(helper_fpatan);
6257 break;
6258 case 4: /* fxtract */
6259 tcg_gen_helper_0_0(helper_fxtract);
6260 break;
6261 case 5: /* fprem1 */
6262 tcg_gen_helper_0_0(helper_fprem1);
6263 break;
6264 case 6: /* fdecstp */
6265 tcg_gen_helper_0_0(helper_fdecstp);
6266 break;
6267 default:
6268 case 7: /* fincstp */
6269 tcg_gen_helper_0_0(helper_fincstp);
6270 break;
6271 }
6272 break;
6273 case 0x0f: /* grp d9/7 */
6274 switch(rm) {
6275 case 0: /* fprem */
6276 tcg_gen_helper_0_0(helper_fprem);
6277 break;
6278 case 1: /* fyl2xp1 */
6279 tcg_gen_helper_0_0(helper_fyl2xp1);
6280 break;
6281 case 2: /* fsqrt */
6282 tcg_gen_helper_0_0(helper_fsqrt);
6283 break;
6284 case 3: /* fsincos */
6285 tcg_gen_helper_0_0(helper_fsincos);
6286 break;
6287 case 5: /* fscale */
6288 tcg_gen_helper_0_0(helper_fscale);
6289 break;
6290 case 4: /* frndint */
6291 tcg_gen_helper_0_0(helper_frndint);
6292 break;
6293 case 6: /* fsin */
6294 tcg_gen_helper_0_0(helper_fsin);
6295 break;
6296 default:
6297 case 7: /* fcos */
6298 tcg_gen_helper_0_0(helper_fcos);
6299 break;
6300 }
6301 break;
6302 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6303 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6304 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6305 {
6306 int op1;
6307
6308 op1 = op & 7;
6309 if (op >= 0x20) {
6310 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6311 if (op >= 0x30)
6312 tcg_gen_helper_0_0(helper_fpop);
6313 } else {
6314 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6315 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6316 }
6317 }
6318 break;
6319 case 0x02: /* fcom */
6320 case 0x22: /* fcom2, undocumented op */
6321 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6322 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6323 break;
6324 case 0x03: /* fcomp */
6325 case 0x23: /* fcomp3, undocumented op */
6326 case 0x32: /* fcomp5, undocumented op */
6327 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6328 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6329 tcg_gen_helper_0_0(helper_fpop);
6330 break;
6331 case 0x15: /* da/5 */
6332 switch(rm) {
6333 case 1: /* fucompp */
6334 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6335 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6336 tcg_gen_helper_0_0(helper_fpop);
6337 tcg_gen_helper_0_0(helper_fpop);
6338 break;
6339 default:
6340 goto illegal_op;
6341 }
6342 break;
6343 case 0x1c:
6344 switch(rm) {
6345 case 0: /* feni (287 only, just do nop here) */
6346 break;
6347 case 1: /* fdisi (287 only, just do nop here) */
6348 break;
6349 case 2: /* fclex */
6350 tcg_gen_helper_0_0(helper_fclex);
6351 break;
6352 case 3: /* fninit */
6353 tcg_gen_helper_0_0(helper_fninit);
6354 break;
6355 case 4: /* fsetpm (287 only, just do nop here) */
6356 break;
6357 default:
6358 goto illegal_op;
6359 }
6360 break;
6361 case 0x1d: /* fucomi */
6362 if (s->cc_op != CC_OP_DYNAMIC)
6363 gen_op_set_cc_op(s->cc_op);
6364 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6365 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6366 s->cc_op = CC_OP_EFLAGS;
6367 break;
6368 case 0x1e: /* fcomi */
6369 if (s->cc_op != CC_OP_DYNAMIC)
6370 gen_op_set_cc_op(s->cc_op);
6371 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6372 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6373 s->cc_op = CC_OP_EFLAGS;
6374 break;
6375 case 0x28: /* ffree sti */
6376 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6377 break;
6378 case 0x2a: /* fst sti */
6379 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6380 break;
6381 case 0x2b: /* fstp sti */
6382 case 0x0b: /* fstp1 sti, undocumented op */
6383 case 0x3a: /* fstp8 sti, undocumented op */
6384 case 0x3b: /* fstp9 sti, undocumented op */
6385 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6386 tcg_gen_helper_0_0(helper_fpop);
6387 break;
6388 case 0x2c: /* fucom st(i) */
6389 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6390 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6391 break;
6392 case 0x2d: /* fucomp st(i) */
6393 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6394 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6395 tcg_gen_helper_0_0(helper_fpop);
6396 break;
6397 case 0x33: /* de/3 */
6398 switch(rm) {
6399 case 1: /* fcompp */
6400 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6401 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6402 tcg_gen_helper_0_0(helper_fpop);
6403 tcg_gen_helper_0_0(helper_fpop);
6404 break;
6405 default:
6406 goto illegal_op;
6407 }
6408 break;
6409 case 0x38: /* ffreep sti, undocumented op */
6410 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6411 tcg_gen_helper_0_0(helper_fpop);
6412 break;
6413 case 0x3c: /* df/4 */
6414 switch(rm) {
6415 case 0:
6416 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6417 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6418 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6419 break;
6420 default:
6421 goto illegal_op;
6422 }
6423 break;
6424 case 0x3d: /* fucomip */
6425 if (s->cc_op != CC_OP_DYNAMIC)
6426 gen_op_set_cc_op(s->cc_op);
6427 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6428 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6429 tcg_gen_helper_0_0(helper_fpop);
6430 s->cc_op = CC_OP_EFLAGS;
6431 break;
6432 case 0x3e: /* fcomip */
6433 if (s->cc_op != CC_OP_DYNAMIC)
6434 gen_op_set_cc_op(s->cc_op);
6435 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6436 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6437 tcg_gen_helper_0_0(helper_fpop);
6438 s->cc_op = CC_OP_EFLAGS;
6439 break;
6440 case 0x10 ... 0x13: /* fcmovxx */
6441 case 0x18 ... 0x1b:
6442 {
6443 int op1, l1;
6444 static const uint8_t fcmov_cc[8] = {
6445 (JCC_B << 1),
6446 (JCC_Z << 1),
6447 (JCC_BE << 1),
6448 (JCC_P << 1),
6449 };
6450 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6451 l1 = gen_new_label();
6452 gen_jcc1(s, s->cc_op, op1, l1);
6453 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6454 gen_set_label(l1);
6455 }
6456 break;
6457 default:
6458 goto illegal_op;
6459 }
6460 }
6461 break;
6462 /************************/
6463 /* string ops */
6464
6465 case 0xa4: /* movsS */
6466 case 0xa5:
6467 if ((b & 1) == 0)
6468 ot = OT_BYTE;
6469 else
6470 ot = dflag + OT_WORD;
6471
6472 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6473 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6474 } else {
6475 gen_movs(s, ot);
6476 }
6477 break;
6478
6479 case 0xaa: /* stosS */
6480 case 0xab:
6481 if ((b & 1) == 0)
6482 ot = OT_BYTE;
6483 else
6484 ot = dflag + OT_WORD;
6485
6486 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6487 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6488 } else {
6489 gen_stos(s, ot);
6490 }
6491 break;
6492 case 0xac: /* lodsS */
6493 case 0xad:
6494 if ((b & 1) == 0)
6495 ot = OT_BYTE;
6496 else
6497 ot = dflag + OT_WORD;
6498 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6499 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6500 } else {
6501 gen_lods(s, ot);
6502 }
6503 break;
6504 case 0xae: /* scasS */
6505 case 0xaf:
6506 if ((b & 1) == 0)
6507 ot = OT_BYTE;
6508 else
6509 ot = dflag + OT_WORD;
6510 if (prefixes & PREFIX_REPNZ) {
6511 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6512 } else if (prefixes & PREFIX_REPZ) {
6513 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6514 } else {
6515 gen_scas(s, ot);
6516 s->cc_op = CC_OP_SUBB + ot;
6517 }
6518 break;
6519
6520 case 0xa6: /* cmpsS */
6521 case 0xa7:
6522 if ((b & 1) == 0)
6523 ot = OT_BYTE;
6524 else
6525 ot = dflag + OT_WORD;
6526 if (prefixes & PREFIX_REPNZ) {
6527 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6528 } else if (prefixes & PREFIX_REPZ) {
6529 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6530 } else {
6531 gen_cmps(s, ot);
6532 s->cc_op = CC_OP_SUBB + ot;
6533 }
6534 break;
6535 case 0x6c: /* insS */
6536 case 0x6d:
6537 if ((b & 1) == 0)
6538 ot = OT_BYTE;
6539 else
6540 ot = dflag ? OT_LONG : OT_WORD;
6541 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6542 gen_op_andl_T0_ffff();
6543 gen_check_io(s, ot, pc_start - s->cs_base,
6544 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6545 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6546 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6547 } else {
6548 gen_ins(s, ot);
6549 if (use_icount) {
6550 gen_jmp(s, s->pc - s->cs_base);
6551 }
6552 }
6553 break;
6554 case 0x6e: /* outsS */
6555 case 0x6f:
6556 if ((b & 1) == 0)
6557 ot = OT_BYTE;
6558 else
6559 ot = dflag ? OT_LONG : OT_WORD;
6560 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6561 gen_op_andl_T0_ffff();
6562 gen_check_io(s, ot, pc_start - s->cs_base,
6563 svm_is_rep(prefixes) | 4);
6564 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6565 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6566 } else {
6567 gen_outs(s, ot);
6568 if (use_icount) {
6569 gen_jmp(s, s->pc - s->cs_base);
6570 }
6571 }
6572 break;
6573
6574 /************************/
6575 /* port I/O */
6576
6577 case 0xe4:
6578 case 0xe5:
6579 if ((b & 1) == 0)
6580 ot = OT_BYTE;
6581 else
6582 ot = dflag ? OT_LONG : OT_WORD;
6583 val = ldub_code(s->pc++);
6584 gen_op_movl_T0_im(val);
6585 gen_check_io(s, ot, pc_start - s->cs_base,
6586 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6587 if (use_icount)
6588 gen_io_start();
6589 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6590 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6591 gen_op_mov_reg_T1(ot, R_EAX);
6592 if (use_icount) {
6593 gen_io_end();
6594 gen_jmp(s, s->pc - s->cs_base);
6595 }
6596 break;
6597 case 0xe6:
6598 case 0xe7:
6599 if ((b & 1) == 0)
6600 ot = OT_BYTE;
6601 else
6602 ot = dflag ? OT_LONG : OT_WORD;
6603 val = ldub_code(s->pc++);
6604 gen_op_movl_T0_im(val);
6605 gen_check_io(s, ot, pc_start - s->cs_base,
6606 svm_is_rep(prefixes));
6607#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6608 if (val == 0x80)
6609 break;
6610#endif /* VBOX */
6611 gen_op_mov_TN_reg(ot, 1, R_EAX);
6612
6613 if (use_icount)
6614 gen_io_start();
6615 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6616 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6617 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6618 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6619 if (use_icount) {
6620 gen_io_end();
6621 gen_jmp(s, s->pc - s->cs_base);
6622 }
6623 break;
6624 case 0xec:
6625 case 0xed:
6626 if ((b & 1) == 0)
6627 ot = OT_BYTE;
6628 else
6629 ot = dflag ? OT_LONG : OT_WORD;
6630 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6631 gen_op_andl_T0_ffff();
6632 gen_check_io(s, ot, pc_start - s->cs_base,
6633 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6634 if (use_icount)
6635 gen_io_start();
6636 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6637 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6638 gen_op_mov_reg_T1(ot, R_EAX);
6639 if (use_icount) {
6640 gen_io_end();
6641 gen_jmp(s, s->pc - s->cs_base);
6642 }
6643 break;
6644 case 0xee:
6645 case 0xef:
6646 if ((b & 1) == 0)
6647 ot = OT_BYTE;
6648 else
6649 ot = dflag ? OT_LONG : OT_WORD;
6650 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6651 gen_op_andl_T0_ffff();
6652 gen_check_io(s, ot, pc_start - s->cs_base,
6653 svm_is_rep(prefixes));
6654 gen_op_mov_TN_reg(ot, 1, R_EAX);
6655
6656 if (use_icount)
6657 gen_io_start();
6658 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6659 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6660 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6661 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6662 if (use_icount) {
6663 gen_io_end();
6664 gen_jmp(s, s->pc - s->cs_base);
6665 }
6666 break;
6667
6668 /************************/
6669 /* control */
6670 case 0xc2: /* ret im */
6671 val = ldsw_code(s->pc);
6672 s->pc += 2;
6673 gen_pop_T0(s);
6674 if (CODE64(s) && s->dflag)
6675 s->dflag = 2;
6676 gen_stack_update(s, val + (2 << s->dflag));
6677 if (s->dflag == 0)
6678 gen_op_andl_T0_ffff();
6679 gen_op_jmp_T0();
6680 gen_eob(s);
6681 break;
6682 case 0xc3: /* ret */
6683 gen_pop_T0(s);
6684 gen_pop_update(s);
6685 if (s->dflag == 0)
6686 gen_op_andl_T0_ffff();
6687 gen_op_jmp_T0();
6688 gen_eob(s);
6689 break;
6690 case 0xca: /* lret im */
6691 val = ldsw_code(s->pc);
6692 s->pc += 2;
6693 do_lret:
6694 if (s->pe && !s->vm86) {
6695 if (s->cc_op != CC_OP_DYNAMIC)
6696 gen_op_set_cc_op(s->cc_op);
6697 gen_jmp_im(pc_start - s->cs_base);
6698 tcg_gen_helper_0_2(helper_lret_protected,
6699 tcg_const_i32(s->dflag),
6700 tcg_const_i32(val));
6701 } else {
6702 gen_stack_A0(s);
6703 /* pop offset */
6704 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6705 if (s->dflag == 0)
6706 gen_op_andl_T0_ffff();
6707 /* NOTE: keeping EIP updated is not a problem in case of
6708 exception */
6709 gen_op_jmp_T0();
6710 /* pop selector */
6711 gen_op_addl_A0_im(2 << s->dflag);
6712 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6713 gen_op_movl_seg_T0_vm(R_CS);
6714 /* add stack offset */
6715 gen_stack_update(s, val + (4 << s->dflag));
6716 }
6717 gen_eob(s);
6718 break;
6719 case 0xcb: /* lret */
6720 val = 0;
6721 goto do_lret;
6722 case 0xcf: /* iret */
6723 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6724 if (!s->pe) {
6725 /* real mode */
6726 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6727 s->cc_op = CC_OP_EFLAGS;
6728 } else if (s->vm86) {
6729#ifdef VBOX
6730 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6731#else
6732 if (s->iopl != 3) {
6733#endif
6734 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6735 } else {
6736 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6737 s->cc_op = CC_OP_EFLAGS;
6738 }
6739 } else {
6740 if (s->cc_op != CC_OP_DYNAMIC)
6741 gen_op_set_cc_op(s->cc_op);
6742 gen_jmp_im(pc_start - s->cs_base);
6743 tcg_gen_helper_0_2(helper_iret_protected,
6744 tcg_const_i32(s->dflag),
6745 tcg_const_i32(s->pc - s->cs_base));
6746 s->cc_op = CC_OP_EFLAGS;
6747 }
6748 gen_eob(s);
6749 break;
6750 case 0xe8: /* call im */
6751 {
6752 if (dflag)
6753 tval = (int32_t)insn_get(s, OT_LONG);
6754 else
6755 tval = (int16_t)insn_get(s, OT_WORD);
6756 next_eip = s->pc - s->cs_base;
6757 tval += next_eip;
6758 if (s->dflag == 0)
6759 tval &= 0xffff;
6760 gen_movtl_T0_im(next_eip);
6761 gen_push_T0(s);
6762 gen_jmp(s, tval);
6763 }
6764 break;
6765 case 0x9a: /* lcall im */
6766 {
6767 unsigned int selector, offset;
6768
6769 if (CODE64(s))
6770 goto illegal_op;
6771 ot = dflag ? OT_LONG : OT_WORD;
6772 offset = insn_get(s, ot);
6773 selector = insn_get(s, OT_WORD);
6774
6775 gen_op_movl_T0_im(selector);
6776 gen_op_movl_T1_imu(offset);
6777 }
6778 goto do_lcall;
6779 case 0xe9: /* jmp im */
6780 if (dflag)
6781 tval = (int32_t)insn_get(s, OT_LONG);
6782 else
6783 tval = (int16_t)insn_get(s, OT_WORD);
6784 tval += s->pc - s->cs_base;
6785 if (s->dflag == 0)
6786 tval &= 0xffff;
6787 gen_jmp(s, tval);
6788 break;
6789 case 0xea: /* ljmp im */
6790 {
6791 unsigned int selector, offset;
6792
6793 if (CODE64(s))
6794 goto illegal_op;
6795 ot = dflag ? OT_LONG : OT_WORD;
6796 offset = insn_get(s, ot);
6797 selector = insn_get(s, OT_WORD);
6798
6799 gen_op_movl_T0_im(selector);
6800 gen_op_movl_T1_imu(offset);
6801 }
6802 goto do_ljmp;
6803 case 0xeb: /* jmp Jb */
6804 tval = (int8_t)insn_get(s, OT_BYTE);
6805 tval += s->pc - s->cs_base;
6806 if (s->dflag == 0)
6807 tval &= 0xffff;
6808 gen_jmp(s, tval);
6809 break;
6810 case 0x70 ... 0x7f: /* jcc Jb */
6811 tval = (int8_t)insn_get(s, OT_BYTE);
6812 goto do_jcc;
6813 case 0x180 ... 0x18f: /* jcc Jv */
6814 if (dflag) {
6815 tval = (int32_t)insn_get(s, OT_LONG);
6816 } else {
6817 tval = (int16_t)insn_get(s, OT_WORD);
6818 }
6819 do_jcc:
6820 next_eip = s->pc - s->cs_base;
6821 tval += next_eip;
6822 if (s->dflag == 0)
6823 tval &= 0xffff;
6824 gen_jcc(s, b, tval, next_eip);
6825 break;
6826
6827 case 0x190 ... 0x19f: /* setcc Gv */
6828 modrm = ldub_code(s->pc++);
6829 gen_setcc(s, b);
6830 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6831 break;
6832 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6833 {
6834 int l1;
6835 TCGv t0;
6836
6837 ot = dflag + OT_WORD;
6838 modrm = ldub_code(s->pc++);
6839 reg = ((modrm >> 3) & 7) | rex_r;
6840 mod = (modrm >> 6) & 3;
6841 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6842 if (mod != 3) {
6843 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6844 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6845 } else {
6846 rm = (modrm & 7) | REX_B(s);
6847 gen_op_mov_v_reg(ot, t0, rm);
6848 }
6849#ifdef TARGET_X86_64
6850 if (ot == OT_LONG) {
6851 /* XXX: specific Intel behaviour ? */
6852 l1 = gen_new_label();
6853 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6854 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6855 gen_set_label(l1);
6856 tcg_gen_movi_tl(cpu_tmp0, 0);
6857 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6858 } else
6859#endif
6860 {
6861 l1 = gen_new_label();
6862 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6863 gen_op_mov_reg_v(ot, reg, t0);
6864 gen_set_label(l1);
6865 }
6866 tcg_temp_free(t0);
6867 }
6868 break;
6869
6870 /************************/
6871 /* flags */
        case 0x9c: /* pushf */
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
#ifdef VBOX
            /* VBox: with the CR4.VME extension, a 16-bit PUSHF (dflag == 0) is
               allowed in virtual-8086 mode even when IOPL < 3; only a 32-bit
               PUSHF (or no VME) still faults. */
            if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
#else
            /* In virtual-8086 mode PUSHF is privileged unless IOPL == 3. */
            if (s->vm86 && s->iopl != 3) {
#endif
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                /* Flush lazy condition codes so the helper sees up-to-date EFLAGS. */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
#ifdef VBOX
                /* VME path: read EFLAGS with VIF substituted for IF. */
                if (s->vm86 && s->vme && s->iopl != 3)
                    tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
                else
#endif
                tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
                gen_push_T0(s);
            }
            break;
6892 case 0x9d: /* popf */
6893 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6894#ifdef VBOX
6895 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6896#else
6897 if (s->vm86 && s->iopl != 3) {
6898#endif
6899 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6900 } else {
6901 gen_pop_T0(s);
6902 if (s->cpl == 0) {
6903 if (s->dflag) {
6904 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6905 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6906 } else {
6907 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6908 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6909 }
6910 } else {
6911 if (s->cpl <= s->iopl) {
6912 if (s->dflag) {
6913 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6914 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6915 } else {
6916 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6917 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6918 }
6919 } else {
6920 if (s->dflag) {
6921 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6922 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6923 } else {
6924#ifdef VBOX
6925 if (s->vm86 && s->vme)
6926 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
6927 else
6928#endif
6929 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6930 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6931 }
6932 }
6933 }
6934 gen_pop_update(s);
6935 s->cc_op = CC_OP_EFLAGS;
6936 /* abort translation because TF flag may change */
6937 gen_jmp_im(s->pc - s->cs_base);
6938 gen_eob(s);
6939 }
6940 break;
        case 0x9e: /* sahf */
            /* SAHF/LAHF are only valid in 64-bit mode when the CPU reports
               the LAHF_LM capability bit. */
            if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
                goto illegal_op;
            gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            /* Keep only OF from the current flags, then merge in
               SF/ZF/AF/PF/CF from AH (SAHF does not touch OF). */
            gen_compute_eflags(cpu_cc_src);
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
            s->cc_op = CC_OP_EFLAGS;
            break;
        case 0x9f: /* lahf */
            if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
                goto illegal_op;
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_compute_eflags(cpu_T[0]);
            /* Note: gen_compute_eflags() only gives the condition codes */
            /* Bit 1 of EFLAGS always reads as 1. */
            tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
            gen_op_mov_reg_T0(OT_BYTE, R_AH);
            break;
6963 case 0xf5: /* cmc */
6964 if (s->cc_op != CC_OP_DYNAMIC)
6965 gen_op_set_cc_op(s->cc_op);
6966 gen_compute_eflags(cpu_cc_src);
6967 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6968 s->cc_op = CC_OP_EFLAGS;
6969 break;
6970 case 0xf8: /* clc */
6971 if (s->cc_op != CC_OP_DYNAMIC)
6972 gen_op_set_cc_op(s->cc_op);
6973 gen_compute_eflags(cpu_cc_src);
6974 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6975 s->cc_op = CC_OP_EFLAGS;
6976 break;
6977 case 0xf9: /* stc */
6978 if (s->cc_op != CC_OP_DYNAMIC)
6979 gen_op_set_cc_op(s->cc_op);
6980 gen_compute_eflags(cpu_cc_src);
6981 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6982 s->cc_op = CC_OP_EFLAGS;
6983 break;
6984 case 0xfc: /* cld */
6985 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6986 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6987 break;
6988 case 0xfd: /* std */
6989 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6990 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6991 break;
6992
6993 /************************/
6994 /* bit operations */
6995 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6996 ot = dflag + OT_WORD;
6997 modrm = ldub_code(s->pc++);
6998 op = (modrm >> 3) & 7;
6999 mod = (modrm >> 6) & 3;
7000 rm = (modrm & 7) | REX_B(s);
7001 if (mod != 3) {
7002 s->rip_offset = 1;
7003 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7004 gen_op_ld_T0_A0(ot + s->mem_index);
7005 } else {
7006 gen_op_mov_TN_reg(ot, 0, rm);
7007 }
7008 /* load shift */
7009 val = ldub_code(s->pc++);
7010 gen_op_movl_T1_im(val);
7011 if (op < 4)
7012 goto illegal_op;
7013 op -= 4;
7014 goto bt_op;
7015 case 0x1a3: /* bt Gv, Ev */
7016 op = 0;
7017 goto do_btx;
7018 case 0x1ab: /* bts */
7019 op = 1;
7020 goto do_btx;
7021 case 0x1b3: /* btr */
7022 op = 2;
7023 goto do_btx;
7024 case 0x1bb: /* btc */
7025 op = 3;
7026 do_btx:
7027 ot = dflag + OT_WORD;
7028 modrm = ldub_code(s->pc++);
7029 reg = ((modrm >> 3) & 7) | rex_r;
7030 mod = (modrm >> 6) & 3;
7031 rm = (modrm & 7) | REX_B(s);
7032 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7033 if (mod != 3) {
7034 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7035 /* specific case: we need to add a displacement */
7036 gen_exts(ot, cpu_T[1]);
7037 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7038 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7039 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7040 gen_op_ld_T0_A0(ot + s->mem_index);
7041 } else {
7042 gen_op_mov_TN_reg(ot, 0, rm);
7043 }
7044 bt_op:
7045 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7046 switch(op) {
7047 case 0:
7048 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7049 tcg_gen_movi_tl(cpu_cc_dst, 0);
7050 break;
7051 case 1:
7052 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7053 tcg_gen_movi_tl(cpu_tmp0, 1);
7054 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7055 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7056 break;
7057 case 2:
7058 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7059 tcg_gen_movi_tl(cpu_tmp0, 1);
7060 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7061 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7062 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7063 break;
7064 default:
7065 case 3:
7066 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7067 tcg_gen_movi_tl(cpu_tmp0, 1);
7068 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7069 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7070 break;
7071 }
7072 s->cc_op = CC_OP_SARB + ot;
7073 if (op != 0) {
7074 if (mod != 3)
7075 gen_op_st_T0_A0(ot + s->mem_index);
7076 else
7077 gen_op_mov_reg_T0(ot, rm);
7078 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7079 tcg_gen_movi_tl(cpu_cc_dst, 0);
7080 }
7081 break;
        case 0x1bc: /* bsf */
        case 0x1bd: /* bsr */
            {
                int label1;
                TCGv t0;

                ot = dflag + OT_WORD;
                modrm = ldub_code(s->pc++);
                reg = ((modrm >> 3) & 7) | rex_r;
                gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
                /* Zero-extend the source so the zero test below is exact for
                   16/32-bit operand sizes. */
                gen_extu(ot, cpu_T[0]);
                label1 = gen_new_label();
                /* cc_dst = 0 means "source was zero" -> ZF set; the
                   destination register is then left unmodified. */
                tcg_gen_movi_tl(cpu_cc_dst, 0);
                t0 = tcg_temp_local_new(TCG_TYPE_TL);
                tcg_gen_mov_tl(t0, cpu_T[0]);
                tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
                /* b & 1 distinguishes bsr (0x1bd) from bsf (0x1bc). */
                if (b & 1) {
                    tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
                } else {
                    tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
                }
                gen_op_mov_reg_T0(ot, reg);
                /* Non-zero source: cc_dst = 1 -> ZF clear. */
                tcg_gen_movi_tl(cpu_cc_dst, 1);
                gen_set_label(label1);
                tcg_gen_discard_tl(cpu_cc_src);
                s->cc_op = CC_OP_LOGICB + ot;
                tcg_temp_free(t0);
            }
            break;
7111 /************************/
7112 /* bcd */
7113 case 0x27: /* daa */
7114 if (CODE64(s))
7115 goto illegal_op;
7116 if (s->cc_op != CC_OP_DYNAMIC)
7117 gen_op_set_cc_op(s->cc_op);
7118 tcg_gen_helper_0_0(helper_daa);
7119 s->cc_op = CC_OP_EFLAGS;
7120 break;
7121 case 0x2f: /* das */
7122 if (CODE64(s))
7123 goto illegal_op;
7124 if (s->cc_op != CC_OP_DYNAMIC)
7125 gen_op_set_cc_op(s->cc_op);
7126 tcg_gen_helper_0_0(helper_das);
7127 s->cc_op = CC_OP_EFLAGS;
7128 break;
7129 case 0x37: /* aaa */
7130 if (CODE64(s))
7131 goto illegal_op;
7132 if (s->cc_op != CC_OP_DYNAMIC)
7133 gen_op_set_cc_op(s->cc_op);
7134 tcg_gen_helper_0_0(helper_aaa);
7135 s->cc_op = CC_OP_EFLAGS;
7136 break;
7137 case 0x3f: /* aas */
7138 if (CODE64(s))
7139 goto illegal_op;
7140 if (s->cc_op != CC_OP_DYNAMIC)
7141 gen_op_set_cc_op(s->cc_op);
7142 tcg_gen_helper_0_0(helper_aas);
7143 s->cc_op = CC_OP_EFLAGS;
7144 break;
        case 0xd4: /* aam */
            /* BCD instructions are invalid in 64-bit mode. */
            if (CODE64(s))
                goto illegal_op;
            /* The immediate byte is the divisor (0x0a for the classic AAM). */
            val = ldub_code(s->pc++);
            if (val == 0) {
                /* AAM with a zero immediate raises #DE (divide error). */
                gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
            } else {
                tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
                s->cc_op = CC_OP_LOGICB;
            }
            break;
        case 0xd5: /* aad */
            if (CODE64(s))
                goto illegal_op;
            /* The immediate byte is the multiplier (0x0a for the classic AAD). */
            val = ldub_code(s->pc++);
            tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
            s->cc_op = CC_OP_LOGICB;
            break;
7163 /************************/
7164 /* misc */
7165 case 0x90: /* nop */
7166 /* XXX: xchg + rex handling */
7167 /* XXX: correct lock test for all insn */
7168 if (prefixes & PREFIX_LOCK)
7169 goto illegal_op;
7170 if (prefixes & PREFIX_REPZ) {
7171 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7172 }
7173 break;
7174 case 0x9b: /* fwait */
7175 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7176 (HF_MP_MASK | HF_TS_MASK)) {
7177 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7178 } else {
7179 if (s->cc_op != CC_OP_DYNAMIC)
7180 gen_op_set_cc_op(s->cc_op);
7181 gen_jmp_im(pc_start - s->cs_base);
7182 tcg_gen_helper_0_0(helper_fwait);
7183 }
7184 break;
7185 case 0xcc: /* int3 */
7186#ifdef VBOX
7187 if (s->vm86 && s->iopl != 3 && !s->vme) {
7188 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7189 } else
7190#endif
7191 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7192 break;
7193 case 0xcd: /* int N */
7194 val = ldub_code(s->pc++);
7195#ifdef VBOX
7196 if (s->vm86 && s->iopl != 3 && !s->vme) {
7197#else
7198 if (s->vm86 && s->iopl != 3) {
7199#endif
7200 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7201 } else {
7202 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7203 }
7204 break;
7205 case 0xce: /* into */
7206 if (CODE64(s))
7207 goto illegal_op;
7208 if (s->cc_op != CC_OP_DYNAMIC)
7209 gen_op_set_cc_op(s->cc_op);
7210 gen_jmp_im(pc_start - s->cs_base);
7211 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7212 break;
7213 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7214 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7215#if 1
7216 gen_debug(s, pc_start - s->cs_base);
7217#else
7218 /* start debug */
7219 tb_flush(cpu_single_env);
7220 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7221#endif
7222 break;
        case 0xfa: /* cli */
            if (!s->vm86) {
                /* Protected/real mode: CLI is permitted only when CPL <= IOPL. */
                if (s->cpl <= s->iopl) {
                    tcg_gen_helper_0_0(helper_cli);
                } else {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                }
            } else {
                /* Virtual-8086 mode: permitted with IOPL == 3; VBox also
                   supports the VME extension, which clears VIF instead of IF
                   when IOPL < 3. */
                if (s->iopl == 3) {
                    tcg_gen_helper_0_0(helper_cli);
#ifdef VBOX
                } else if (s->iopl != 3 && s->vme) {
                    tcg_gen_helper_0_0(helper_cli_vme);
#endif
                } else {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                }
            }
            break;
        case 0xfb: /* sti */
            if (!s->vm86) {
                if (s->cpl <= s->iopl) {
                /* Also reached via "goto gen_sti" from the v86 IOPL==3 path below. */
                gen_sti:
                    tcg_gen_helper_0_0(helper_sti);
                    /* interruptions are enabled only the first insn after sti */
                    /* If several instructions disable interrupts, only the
                       _first_ does it */
                    if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                        tcg_gen_helper_0_0(helper_set_inhibit_irq);
                    /* give a chance to handle pending irqs */
                    /* End the TB so the interrupt-shadow window is exactly one insn. */
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                }
            } else {
                if (s->iopl == 3) {
                    goto gen_sti;
#ifdef VBOX
                /* VBox VME extension: STI sets VIF rather than IF when IOPL < 3. */
                } else if (s->iopl != 3 && s->vme) {
                    tcg_gen_helper_0_0(helper_sti_vme);
                    /* give a chance to handle pending irqs */
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
#endif
                } else {
                    gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                }
            }
            break;
7273 case 0x62: /* bound */
7274 if (CODE64(s))
7275 goto illegal_op;
7276 ot = dflag ? OT_LONG : OT_WORD;
7277 modrm = ldub_code(s->pc++);
7278 reg = (modrm >> 3) & 7;
7279 mod = (modrm >> 6) & 3;
7280 if (mod == 3)
7281 goto illegal_op;
7282 gen_op_mov_TN_reg(ot, 0, reg);
7283 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7284 gen_jmp_im(pc_start - s->cs_base);
7285 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7286 if (ot == OT_WORD)
7287 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7288 else
7289 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7290 break;
7291 case 0x1c8 ... 0x1cf: /* bswap reg */
7292 reg = (b & 7) | REX_B(s);
7293#ifdef TARGET_X86_64
7294 if (dflag == 2) {
7295 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7296 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7297 gen_op_mov_reg_T0(OT_QUAD, reg);
7298 } else
7299 {
7300 TCGv tmp0;
7301 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7302
7303 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7304 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7305 tcg_gen_bswap_i32(tmp0, tmp0);
7306 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7307 gen_op_mov_reg_T0(OT_LONG, reg);
7308 }
7309#else
7310 {
7311 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7312 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7313 gen_op_mov_reg_T0(OT_LONG, reg);
7314 }
7315#endif
7316 break;
        case 0xd6: /* salc */
            /* Undocumented instruction, invalid in 64-bit mode. */
            if (CODE64(s))
                goto illegal_op;
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            /* AL = CF ? 0xff : 0x00: carry is computed as 0/1, then negated
               to 0/-1 so the low byte becomes 0x00/0xff. */
            gen_compute_eflags_c(cpu_T[0]);
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_BYTE, R_EAX);
            break;
        case 0xe0: /* loopnz */
        case 0xe1: /* loopz */
        case 0xe2: /* loop */
        case 0xe3: /* jecxz */
            {
                /* l1 = branch taken, l3 = fall through (loop exit),
                   l2 = common end-of-TB exit. */
                int l1, l2, l3;

                tval = (int8_t)insn_get(s, OT_BYTE);
                next_eip = s->pc - s->cs_base;
                tval += next_eip;
                /* With 16-bit operand size the target wraps within 64K. */
                if (s->dflag == 0)
                    tval &= 0xffff;

                l1 = gen_new_label();
                l2 = gen_new_label();
                l3 = gen_new_label();
                /* Low two opcode bits select the variant. */
                b &= 3;
                switch(b) {
                case 0: /* loopnz */
                case 1: /* loopz */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    /* Decrement (E)CX first; if it hit zero the loop exits
                       regardless of ZF. */
                    gen_op_add_reg_im(s->aflag, R_ECX, -1);
                    gen_op_jz_ecx(s->aflag, l3);
                    gen_compute_eflags(cpu_tmp0);
                    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                    if (b == 0) {
                        /* loopnz: taken when ZF == 0. */
                        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
                    } else {
                        /* loopz: taken when ZF == 1. */
                        tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
                    }
                    break;
                case 2: /* loop */
                    gen_op_add_reg_im(s->aflag, R_ECX, -1);
                    gen_op_jnz_ecx(s->aflag, l1);
                    break;
                default:
                case 3: /* jcxz */
                    /* jcxz/jecxz: no decrement, taken when (E)CX == 0. */
                    gen_op_jz_ecx(s->aflag, l1);
                    break;
                }

                /* Not taken: continue at the next instruction. */
                gen_set_label(l3);
                gen_jmp_im(next_eip);
                tcg_gen_br(l2);

                /* Taken: jump to the displacement target. */
                gen_set_label(l1);
                gen_jmp_im(tval);
                gen_set_label(l2);
                gen_eob(s);
            }
            break;
7378 case 0x130: /* wrmsr */
7379 case 0x132: /* rdmsr */
7380 if (s->cpl != 0) {
7381 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7382 } else {
7383 if (s->cc_op != CC_OP_DYNAMIC)
7384 gen_op_set_cc_op(s->cc_op);
7385 gen_jmp_im(pc_start - s->cs_base);
7386 if (b & 2) {
7387 tcg_gen_helper_0_0(helper_rdmsr);
7388 } else {
7389 tcg_gen_helper_0_0(helper_wrmsr);
7390 }
7391 }
7392 break;
7393 case 0x131: /* rdtsc */
7394 if (s->cc_op != CC_OP_DYNAMIC)
7395 gen_op_set_cc_op(s->cc_op);
7396 gen_jmp_im(pc_start - s->cs_base);
7397 if (use_icount)
7398 gen_io_start();
7399 tcg_gen_helper_0_0(helper_rdtsc);
7400 if (use_icount) {
7401 gen_io_end();
7402 gen_jmp(s, s->pc - s->cs_base);
7403 }
7404 break;
7405 case 0x133: /* rdpmc */
7406 if (s->cc_op != CC_OP_DYNAMIC)
7407 gen_op_set_cc_op(s->cc_op);
7408 gen_jmp_im(pc_start - s->cs_base);
7409 tcg_gen_helper_0_0(helper_rdpmc);
7410 break;
7411 case 0x134: /* sysenter */
7412#ifndef VBOX
7413 /* For Intel SYSENTER is valid on 64-bit */
7414 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7415#else
7416 /** @todo: make things right */
7417 if (CODE64(s))
7418#endif
7419 goto illegal_op;
7420 if (!s->pe) {
7421 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7422 } else {
7423 if (s->cc_op != CC_OP_DYNAMIC) {
7424 gen_op_set_cc_op(s->cc_op);
7425 s->cc_op = CC_OP_DYNAMIC;
7426 }
7427 gen_jmp_im(pc_start - s->cs_base);
7428 tcg_gen_helper_0_0(helper_sysenter);
7429 gen_eob(s);
7430 }
7431 break;
7432 case 0x135: /* sysexit */
7433#ifndef VBOX
7434 /* For Intel SYSEXIT is valid on 64-bit */
7435 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7436#else
7437 /** @todo: make things right */
7438 if (CODE64(s))
7439#endif
7440 goto illegal_op;
7441 if (!s->pe) {
7442 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7443 } else {
7444 if (s->cc_op != CC_OP_DYNAMIC) {
7445 gen_op_set_cc_op(s->cc_op);
7446 s->cc_op = CC_OP_DYNAMIC;
7447 }
7448 gen_jmp_im(pc_start - s->cs_base);
7449 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7450 gen_eob(s);
7451 }
7452 break;
7453#ifdef TARGET_X86_64
7454 case 0x105: /* syscall */
7455 /* XXX: is it usable in real mode ? */
7456 if (s->cc_op != CC_OP_DYNAMIC) {
7457 gen_op_set_cc_op(s->cc_op);
7458 s->cc_op = CC_OP_DYNAMIC;
7459 }
7460 gen_jmp_im(pc_start - s->cs_base);
7461 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7462 gen_eob(s);
7463 break;
7464 case 0x107: /* sysret */
7465 if (!s->pe) {
7466 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7467 } else {
7468 if (s->cc_op != CC_OP_DYNAMIC) {
7469 gen_op_set_cc_op(s->cc_op);
7470 s->cc_op = CC_OP_DYNAMIC;
7471 }
7472 gen_jmp_im(pc_start - s->cs_base);
7473 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7474 /* condition codes are modified only in long mode */
7475 if (s->lma)
7476 s->cc_op = CC_OP_EFLAGS;
7477 gen_eob(s);
7478 }
7479 break;
7480#endif
7481 case 0x1a2: /* cpuid */
7482 if (s->cc_op != CC_OP_DYNAMIC)
7483 gen_op_set_cc_op(s->cc_op);
7484 gen_jmp_im(pc_start - s->cs_base);
7485 tcg_gen_helper_0_0(helper_cpuid);
7486 break;
7487 case 0xf4: /* hlt */
7488 if (s->cpl != 0) {
7489 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7490 } else {
7491 if (s->cc_op != CC_OP_DYNAMIC)
7492 gen_op_set_cc_op(s->cc_op);
7493 gen_jmp_im(pc_start - s->cs_base);
7494 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7495 s->is_jmp = 3;
7496 }
7497 break;
7498 case 0x100:
7499 modrm = ldub_code(s->pc++);
7500 mod = (modrm >> 6) & 3;
7501 op = (modrm >> 3) & 7;
7502 switch(op) {
7503 case 0: /* sldt */
7504 if (!s->pe || s->vm86)
7505 goto illegal_op;
7506 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7507 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7508 ot = OT_WORD;
7509 if (mod == 3)
7510 ot += s->dflag;
7511 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7512 break;
7513 case 2: /* lldt */
7514 if (!s->pe || s->vm86)
7515 goto illegal_op;
7516 if (s->cpl != 0) {
7517 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7518 } else {
7519 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7520 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7521 gen_jmp_im(pc_start - s->cs_base);
7522 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7523 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7524 }
7525 break;
7526 case 1: /* str */
7527 if (!s->pe || s->vm86)
7528 goto illegal_op;
7529 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7530 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7531 ot = OT_WORD;
7532 if (mod == 3)
7533 ot += s->dflag;
7534 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7535 break;
7536 case 3: /* ltr */
7537 if (!s->pe || s->vm86)
7538 goto illegal_op;
7539 if (s->cpl != 0) {
7540 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7541 } else {
7542 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7543 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7544 gen_jmp_im(pc_start - s->cs_base);
7545 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7546 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7547 }
7548 break;
7549 case 4: /* verr */
7550 case 5: /* verw */
7551 if (!s->pe || s->vm86)
7552 goto illegal_op;
7553 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7554 if (s->cc_op != CC_OP_DYNAMIC)
7555 gen_op_set_cc_op(s->cc_op);
7556 if (op == 4)
7557 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7558 else
7559 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7560 s->cc_op = CC_OP_EFLAGS;
7561 break;
7562 default:
7563 goto illegal_op;
7564 }
7565 break;
7566 case 0x101:
7567 modrm = ldub_code(s->pc++);
7568 mod = (modrm >> 6) & 3;
7569 op = (modrm >> 3) & 7;
7570 rm = modrm & 7;
7571 switch(op) {
7572 case 0: /* sgdt */
7573 if (mod == 3)
7574 goto illegal_op;
7575 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7576 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7577 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7578 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7579 gen_add_A0_im(s, 2);
7580 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7581 if (!s->dflag)
7582 gen_op_andl_T0_im(0xffffff);
7583 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7584 break;
7585 case 1:
7586 if (mod == 3) {
7587 switch (rm) {
7588 case 0: /* monitor */
7589 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7590 s->cpl != 0)
7591 goto illegal_op;
7592 if (s->cc_op != CC_OP_DYNAMIC)
7593 gen_op_set_cc_op(s->cc_op);
7594 gen_jmp_im(pc_start - s->cs_base);
7595#ifdef TARGET_X86_64
7596 if (s->aflag == 2) {
7597 gen_op_movq_A0_reg(R_EAX);
7598 } else
7599#endif
7600 {
7601 gen_op_movl_A0_reg(R_EAX);
7602 if (s->aflag == 0)
7603 gen_op_andl_A0_ffff();
7604 }
7605 gen_add_A0_ds_seg(s);
7606 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7607 break;
7608 case 1: /* mwait */
7609 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7610 s->cpl != 0)
7611 goto illegal_op;
7612 if (s->cc_op != CC_OP_DYNAMIC) {
7613 gen_op_set_cc_op(s->cc_op);
7614 s->cc_op = CC_OP_DYNAMIC;
7615 }
7616 gen_jmp_im(pc_start - s->cs_base);
7617 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7618 gen_eob(s);
7619 break;
7620 default:
7621 goto illegal_op;
7622 }
7623 } else { /* sidt */
7624 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7625 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7626 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7627 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7628 gen_add_A0_im(s, 2);
7629 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7630 if (!s->dflag)
7631 gen_op_andl_T0_im(0xffffff);
7632 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7633 }
7634 break;
7635 case 2: /* lgdt */
7636 case 3: /* lidt */
7637 if (mod == 3) {
7638 if (s->cc_op != CC_OP_DYNAMIC)
7639 gen_op_set_cc_op(s->cc_op);
7640 gen_jmp_im(pc_start - s->cs_base);
7641 switch(rm) {
7642 case 0: /* VMRUN */
7643 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7644 goto illegal_op;
7645 if (s->cpl != 0) {
7646 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7647 break;
7648 } else {
7649 tcg_gen_helper_0_2(helper_vmrun,
7650 tcg_const_i32(s->aflag),
7651 tcg_const_i32(s->pc - pc_start));
7652 tcg_gen_exit_tb(0);
7653 s->is_jmp = 3;
7654 }
7655 break;
7656 case 1: /* VMMCALL */
7657 if (!(s->flags & HF_SVME_MASK))
7658 goto illegal_op;
7659 tcg_gen_helper_0_0(helper_vmmcall);
7660 break;
7661 case 2: /* VMLOAD */
7662 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7663 goto illegal_op;
7664 if (s->cpl != 0) {
7665 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7666 break;
7667 } else {
7668 tcg_gen_helper_0_1(helper_vmload,
7669 tcg_const_i32(s->aflag));
7670 }
7671 break;
7672 case 3: /* VMSAVE */
7673 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7674 goto illegal_op;
7675 if (s->cpl != 0) {
7676 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7677 break;
7678 } else {
7679 tcg_gen_helper_0_1(helper_vmsave,
7680 tcg_const_i32(s->aflag));
7681 }
7682 break;
7683 case 4: /* STGI */
7684 if ((!(s->flags & HF_SVME_MASK) &&
7685 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7686 !s->pe)
7687 goto illegal_op;
7688 if (s->cpl != 0) {
7689 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7690 break;
7691 } else {
7692 tcg_gen_helper_0_0(helper_stgi);
7693 }
7694 break;
7695 case 5: /* CLGI */
7696 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7697 goto illegal_op;
7698 if (s->cpl != 0) {
7699 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7700 break;
7701 } else {
7702 tcg_gen_helper_0_0(helper_clgi);
7703 }
7704 break;
7705 case 6: /* SKINIT */
7706 if ((!(s->flags & HF_SVME_MASK) &&
7707 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7708 !s->pe)
7709 goto illegal_op;
7710 tcg_gen_helper_0_0(helper_skinit);
7711 break;
7712 case 7: /* INVLPGA */
7713 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7714 goto illegal_op;
7715 if (s->cpl != 0) {
7716 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7717 break;
7718 } else {
7719 tcg_gen_helper_0_1(helper_invlpga,
7720 tcg_const_i32(s->aflag));
7721 }
7722 break;
7723 default:
7724 goto illegal_op;
7725 }
7726 } else if (s->cpl != 0) {
7727 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7728 } else {
7729 gen_svm_check_intercept(s, pc_start,
7730 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7731 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7732 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7733 gen_add_A0_im(s, 2);
7734 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7735 if (!s->dflag)
7736 gen_op_andl_T0_im(0xffffff);
7737 if (op == 2) {
7738 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7739 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7740 } else {
7741 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7742 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7743 }
7744 }
7745 break;
7746 case 4: /* smsw */
7747 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7748 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7749 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7750 break;
7751 case 6: /* lmsw */
7752 if (s->cpl != 0) {
7753 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7754 } else {
7755 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7756 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7757 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7758 gen_jmp_im(s->pc - s->cs_base);
7759 gen_eob(s);
7760 }
7761 break;
7762 case 7: /* invlpg */
7763 if (s->cpl != 0) {
7764 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7765 } else {
7766 if (mod == 3) {
7767#ifdef TARGET_X86_64
7768 if (CODE64(s) && rm == 0) {
7769 /* swapgs */
7770 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7771 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7772 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7773 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7774 } else
7775#endif
7776 {
7777 goto illegal_op;
7778 }
7779 } else {
7780 if (s->cc_op != CC_OP_DYNAMIC)
7781 gen_op_set_cc_op(s->cc_op);
7782 gen_jmp_im(pc_start - s->cs_base);
7783 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7784 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7785 gen_jmp_im(s->pc - s->cs_base);
7786 gen_eob(s);
7787 }
7788 }
7789 break;
7790 default:
7791 goto illegal_op;
7792 }
7793 break;
7794 case 0x108: /* invd */
7795 case 0x109: /* wbinvd */
7796 if (s->cpl != 0) {
7797 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7798 } else {
7799 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7800 /* nothing to do */
7801 }
7802 break;
7803 case 0x63: /* arpl or movslS (x86_64) */
7804#ifdef TARGET_X86_64
7805 if (CODE64(s)) {
7806 int d_ot;
7807 /* d_ot is the size of destination */
7808 d_ot = dflag + OT_WORD;
7809
7810 modrm = ldub_code(s->pc++);
7811 reg = ((modrm >> 3) & 7) | rex_r;
7812 mod = (modrm >> 6) & 3;
7813 rm = (modrm & 7) | REX_B(s);
7814
7815 if (mod == 3) {
7816 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7817 /* sign extend */
7818 if (d_ot == OT_QUAD)
7819 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7820 gen_op_mov_reg_T0(d_ot, reg);
7821 } else {
7822 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7823 if (d_ot == OT_QUAD) {
7824 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7825 } else {
7826 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7827 }
7828 gen_op_mov_reg_T0(d_ot, reg);
7829 }
7830 } else
7831#endif
7832 {
7833 int label1;
7834 TCGv t0, t1, t2;
7835
7836 if (!s->pe || s->vm86)
7837 goto illegal_op;
7838 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7839 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7840 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7841 ot = OT_WORD;
7842 modrm = ldub_code(s->pc++);
7843 reg = (modrm >> 3) & 7;
7844 mod = (modrm >> 6) & 3;
7845 rm = modrm & 7;
7846#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
7847 /** @todo: how to do that right? */
7848 //gen_op_mov_TN_reg[ot][1][reg]();
7849#endif
7850 if (mod != 3) {
7851 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7852 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7853 } else {
7854 gen_op_mov_v_reg(ot, t0, rm);
7855 }
7856 gen_op_mov_v_reg(ot, t1, reg);
7857 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7858 tcg_gen_andi_tl(t1, t1, 3);
7859 tcg_gen_movi_tl(t2, 0);
7860 label1 = gen_new_label();
7861 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7862 tcg_gen_andi_tl(t0, t0, ~3);
7863 tcg_gen_or_tl(t0, t0, t1);
7864 tcg_gen_movi_tl(t2, CC_Z);
7865 gen_set_label(label1);
7866 if (mod != 3) {
7867 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7868 } else {
7869 gen_op_mov_reg_v(ot, rm, t0);
7870 }
7871 if (s->cc_op != CC_OP_DYNAMIC)
7872 gen_op_set_cc_op(s->cc_op);
7873 gen_compute_eflags(cpu_cc_src);
7874 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7875 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7876 s->cc_op = CC_OP_EFLAGS;
7877 tcg_temp_free(t0);
7878 tcg_temp_free(t1);
7879 tcg_temp_free(t2);
7880 }
7881 break;
7882 case 0x102: /* lar */
7883 case 0x103: /* lsl */
7884 {
7885 int label1;
7886 TCGv t0;
7887 if (!s->pe || s->vm86)
7888 goto illegal_op;
7889 ot = dflag ? OT_LONG : OT_WORD;
7890 modrm = ldub_code(s->pc++);
7891 reg = ((modrm >> 3) & 7) | rex_r;
7892 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7893 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7894 if (s->cc_op != CC_OP_DYNAMIC)
7895 gen_op_set_cc_op(s->cc_op);
7896 if (b == 0x102)
7897 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
7898 else
7899 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
7900 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7901 label1 = gen_new_label();
7902 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7903 gen_op_mov_reg_v(ot, reg, t0);
7904 gen_set_label(label1);
7905 s->cc_op = CC_OP_EFLAGS;
7906 tcg_temp_free(t0);
7907 }
7908 break;
7909 case 0x118:
7910 modrm = ldub_code(s->pc++);
7911 mod = (modrm >> 6) & 3;
7912 op = (modrm >> 3) & 7;
7913 switch(op) {
7914 case 0: /* prefetchnta */
7915 case 1: /* prefetchnt0 */
7916 case 2: /* prefetchnt0 */
7917 case 3: /* prefetchnt0 */
7918 if (mod == 3)
7919 goto illegal_op;
7920 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7921 /* nothing more to do */
7922 break;
7923 default: /* nop (multi byte) */
7924 gen_nop_modrm(s, modrm);
7925 break;
7926 }
7927 break;
7928 case 0x119 ... 0x11f: /* nop (multi byte) */
7929 modrm = ldub_code(s->pc++);
7930 gen_nop_modrm(s, modrm);
7931 break;
7932 case 0x120: /* mov reg, crN */
7933 case 0x122: /* mov crN, reg */
7934 if (s->cpl != 0) {
7935 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7936 } else {
7937 modrm = ldub_code(s->pc++);
7938 if ((modrm & 0xc0) != 0xc0)
7939 goto illegal_op;
7940 rm = (modrm & 7) | REX_B(s);
7941 reg = ((modrm >> 3) & 7) | rex_r;
7942 if (CODE64(s))
7943 ot = OT_QUAD;
7944 else
7945 ot = OT_LONG;
7946 switch(reg) {
7947 case 0:
7948 case 2:
7949 case 3:
7950 case 4:
7951 case 8:
7952 if (s->cc_op != CC_OP_DYNAMIC)
7953 gen_op_set_cc_op(s->cc_op);
7954 gen_jmp_im(pc_start - s->cs_base);
7955 if (b & 2) {
7956 gen_op_mov_TN_reg(ot, 0, rm);
7957 tcg_gen_helper_0_2(helper_write_crN,
7958 tcg_const_i32(reg), cpu_T[0]);
7959 gen_jmp_im(s->pc - s->cs_base);
7960 gen_eob(s);
7961 } else {
7962 tcg_gen_helper_1_1(helper_read_crN,
7963 cpu_T[0], tcg_const_i32(reg));
7964 gen_op_mov_reg_T0(ot, rm);
7965 }
7966 break;
7967 default:
7968 goto illegal_op;
7969 }
7970 }
7971 break;
7972 case 0x121: /* mov reg, drN */
7973 case 0x123: /* mov drN, reg */
7974 if (s->cpl != 0) {
7975 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7976 } else {
7977 modrm = ldub_code(s->pc++);
7978 if ((modrm & 0xc0) != 0xc0)
7979 goto illegal_op;
7980 rm = (modrm & 7) | REX_B(s);
7981 reg = ((modrm >> 3) & 7) | rex_r;
7982 if (CODE64(s))
7983 ot = OT_QUAD;
7984 else
7985 ot = OT_LONG;
7986 /* XXX: do it dynamically with CR4.DE bit */
7987 if (reg == 4 || reg == 5 || reg >= 8)
7988 goto illegal_op;
7989 if (b & 2) {
7990 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7991 gen_op_mov_TN_reg(ot, 0, rm);
7992 tcg_gen_helper_0_2(helper_movl_drN_T0,
7993 tcg_const_i32(reg), cpu_T[0]);
7994 gen_jmp_im(s->pc - s->cs_base);
7995 gen_eob(s);
7996 } else {
7997 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7998 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7999 gen_op_mov_reg_T0(ot, rm);
8000 }
8001 }
8002 break;
8003 case 0x106: /* clts */
8004 if (s->cpl != 0) {
8005 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8006 } else {
8007 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8008 tcg_gen_helper_0_0(helper_clts);
8009 /* abort block because static cpu state changed */
8010 gen_jmp_im(s->pc - s->cs_base);
8011 gen_eob(s);
8012 }
8013 break;
8014 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8015 case 0x1c3: /* MOVNTI reg, mem */
8016 if (!(s->cpuid_features & CPUID_SSE2))
8017 goto illegal_op;
8018 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8019 modrm = ldub_code(s->pc++);
8020 mod = (modrm >> 6) & 3;
8021 if (mod == 3)
8022 goto illegal_op;
8023 reg = ((modrm >> 3) & 7) | rex_r;
8024 /* generate a generic store */
8025 gen_ldst_modrm(s, modrm, ot, reg, 1);
8026 break;
8027 case 0x1ae:
8028 modrm = ldub_code(s->pc++);
8029 mod = (modrm >> 6) & 3;
8030 op = (modrm >> 3) & 7;
8031 switch(op) {
8032 case 0: /* fxsave */
8033 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8034 (s->flags & HF_EM_MASK))
8035 goto illegal_op;
8036 if (s->flags & HF_TS_MASK) {
8037 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8038 break;
8039 }
8040 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8041 if (s->cc_op != CC_OP_DYNAMIC)
8042 gen_op_set_cc_op(s->cc_op);
8043 gen_jmp_im(pc_start - s->cs_base);
8044 tcg_gen_helper_0_2(helper_fxsave,
8045 cpu_A0, tcg_const_i32((s->dflag == 2)));
8046 break;
8047 case 1: /* fxrstor */
8048 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8049 (s->flags & HF_EM_MASK))
8050 goto illegal_op;
8051 if (s->flags & HF_TS_MASK) {
8052 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8053 break;
8054 }
8055 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8056 if (s->cc_op != CC_OP_DYNAMIC)
8057 gen_op_set_cc_op(s->cc_op);
8058 gen_jmp_im(pc_start - s->cs_base);
8059 tcg_gen_helper_0_2(helper_fxrstor,
8060 cpu_A0, tcg_const_i32((s->dflag == 2)));
8061 break;
8062 case 2: /* ldmxcsr */
8063 case 3: /* stmxcsr */
8064 if (s->flags & HF_TS_MASK) {
8065 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8066 break;
8067 }
8068 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8069 mod == 3)
8070 goto illegal_op;
8071 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8072 if (op == 2) {
8073 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8074 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8075 } else {
8076 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8077 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8078 }
8079 break;
8080 case 5: /* lfence */
8081 case 6: /* mfence */
8082 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8083 goto illegal_op;
8084 break;
8085 case 7: /* sfence / clflush */
8086 if ((modrm & 0xc7) == 0xc0) {
8087 /* sfence */
8088 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8089 if (!(s->cpuid_features & CPUID_SSE))
8090 goto illegal_op;
8091 } else {
8092 /* clflush */
8093 if (!(s->cpuid_features & CPUID_CLFLUSH))
8094 goto illegal_op;
8095 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8096 }
8097 break;
8098 default:
8099 goto illegal_op;
8100 }
8101 break;
8102 case 0x10d: /* 3DNow! prefetch(w) */
8103 modrm = ldub_code(s->pc++);
8104 mod = (modrm >> 6) & 3;
8105 if (mod == 3)
8106 goto illegal_op;
8107 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8108 /* ignore for now */
8109 break;
8110 case 0x1aa: /* rsm */
8111 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8112 if (!(s->flags & HF_SMM_MASK))
8113 goto illegal_op;
8114 if (s->cc_op != CC_OP_DYNAMIC) {
8115 gen_op_set_cc_op(s->cc_op);
8116 s->cc_op = CC_OP_DYNAMIC;
8117 }
8118 gen_jmp_im(s->pc - s->cs_base);
8119 tcg_gen_helper_0_0(helper_rsm);
8120 gen_eob(s);
8121 break;
8122 case 0x1b8: /* SSE4.2 popcnt */
8123 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8124 PREFIX_REPZ)
8125 goto illegal_op;
8126 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8127 goto illegal_op;
8128
8129 modrm = ldub_code(s->pc++);
8130 reg = ((modrm >> 3) & 7);
8131
8132 if (s->prefix & PREFIX_DATA)
8133 ot = OT_WORD;
8134 else if (s->dflag != 2)
8135 ot = OT_LONG;
8136 else
8137 ot = OT_QUAD;
8138
8139 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8140 tcg_gen_helper_1_2(helper_popcnt,
8141 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8142 gen_op_mov_reg_T0(ot, reg);
8143
8144 s->cc_op = CC_OP_EFLAGS;
8145 break;
8146 case 0x10e ... 0x10f:
8147 /* 3DNow! instructions, ignore prefixes */
8148 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
8149 case 0x110 ... 0x117:
8150 case 0x128 ... 0x12f:
8151 case 0x138 ... 0x13a:
8152 case 0x150 ... 0x177:
8153 case 0x17c ... 0x17f:
8154 case 0x1c2:
8155 case 0x1c4 ... 0x1c6:
8156 case 0x1d0 ... 0x1fe:
8157 gen_sse(s, b, pc_start, rex_r);
8158 break;
8159 default:
8160 goto illegal_op;
8161 }
8162 /* lock generation */
8163 if (s->prefix & PREFIX_LOCK)
8164 tcg_gen_helper_0_0(helper_unlock);
8165 return s->pc;
8166 illegal_op:
8167 if (s->prefix & PREFIX_LOCK)
8168 tcg_gen_helper_0_0(helper_unlock);
8169 /* XXX: ensure that no lock was generated */
8170 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8171 return s->pc;
8172}
8173
/* One-time TCG front-end setup: pins the CPU state pointer to a fixed host
   register, exposes the lazy condition-code fields of CPUState as named TCG
   globals, and registers every helper declared in helper.h so generated code
   can call them.  Must run before any translation takes place. */
void optimize_flags_init(void)
{
#ifndef VBOX
    /* Sanity-check CCTable's expected size: 8 bytes on 32-bit hosts,
       16 bytes on 64-bit hosts. */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
#endif
    /* 'env' lives permanently in host register TCG_AREG0. */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    /* Map the lazy-flags state (cc_op/cc_src/cc_dst/cc_tmp) in CPUState
       to named TCG globals addressed relative to 'env'. */
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers: helper.h is included here as an X-macro list, so
       each DEF_HELPER line expands into a tcg_register_helper() call. */

#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
8198
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to map a host PC
   back to a guest PC/cc_op when an exception is raised mid-block). */
#ifndef VBOX
static inline void gen_intermediate_code_internal(CPUState *env,
#else /* VBOX */
DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
#endif /* VBOX */
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;        /* guest PC of the next insn to translate */
    uint16_t *gen_opc_end;      /* hard limit of the TCG opcode buffer */
    int j, lj, cflags;          /* lj: last filled slot in gen_opc_* arrays */
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the TB's CPU-state snapshot into the disassembly context;
       these bits decide decoding mode (16/32/64-bit, vm86, CPL, ...). */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* Condition codes start unknown; the decoder tracks them lazily. */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        /* softmmu: pick the user (CPL 3) or kernel MMU index. */
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* Direct TB chaining is only allowed when nothing forces us back to
       the main loop after every instruction (no TF, no single-step, no
       pending IRQ-inhibit window). */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* Allocate the per-TB TCG temporaries used throughout the decoder. */
    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    /* CF_COUNT_MASK in cflags caps the instruction count (icount mode);
       0 means "no explicit cap". */
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for(;;) {
        /* Emit a debug exception before any instruction that has a
           breakpoint set on it. */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record, per TCG op index, which guest insn starts there
               (gen_opc_instr_start) plus its PC and cc_op state, zero
               filling the slots covered by the previous instruction. */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        /* Translate one guest instruction; returns the PC after it. */
        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too: opcode buffer
           nearly full, guest code close to a page boundary, or the
           icount budget exhausted. */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    /* In search_pc mode the TB was already translated once, so its size
       and icount are already correct; don't overwrite them. */
    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}
8380
/* Translate basic block 'tb' into TCG intermediate code (normal code
   generation path: no per-instruction PC information is recorded). */
void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
8385
/* Re-translate basic block 'tb' with search_pc enabled, so that PC and
   cc_op information is recorded for each intermediate instruction (used
   when restoring CPU state after a fault inside the block). */
void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
8390
8391void gen_pc_load(CPUState *env, TranslationBlock *tb,
8392 unsigned long searched_pc, int pc_pos, void *puc)
8393{
8394 int cc_op;
8395#ifdef DEBUG_DISAS
8396 if (loglevel & CPU_LOG_TB_OP) {
8397 int i;
8398 fprintf(logfile, "RESTORE:\n");
8399 for(i = 0;i <= pc_pos; i++) {
8400 if (gen_opc_instr_start[i]) {
8401 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8402 }
8403 }
8404 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8405 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8406 (uint32_t)tb->cs_base);
8407 }
8408#endif
8409 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8410 cc_op = gen_opc_cc_op[pc_pos];
8411 if (cc_op != CC_OP_DYNAMIC)
8412 env->cc_op = cc_op;
8413}
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette