VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@ 13559

Last change on this file since 13559 was 13559, checked in by vboxsync, 16 years ago

made TCG generate VBOX-aware phys mem access code, some more QEMU code merged

  • Property svn:eol-style set to native
File size: 272.6 KB
 
/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
 * a choice of LGPL license versions is made available with the language indicating
 * that LGPLv2 or any later version may be used, or where a choice of which version
 * of the LGPL is applied is otherwise unspecified.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifndef VBOX
#include <inttypes.h>
#include <signal.h>
#include <assert.h>
#endif /* !VBOX */

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef VBOX

/* Special/override code readers to hide patched code. */

uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

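/* The wider readers below are composed from byte loads so that every byte
   goes through the patch-aware ldub_code_raw() path above; assembling the
   little-endian x86 words explicitly also keeps them independent of the
   host's endianness. */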
uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */


typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

#ifdef VBOX
static void gen_check_external_event(void);
#endif

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

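/* In the loads/stores below, 'idx' packs the operand size in bits 0-1 and
   (memory index + 1) in bits 2 and up; callers pass 'ot + s->mem_index',
   i.e. s->mem_index is pre-shifted, and the decode below recovers the MMU
   index for the tcg_gen_qemu_ld*/st* accessors. */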
#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}

/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
#ifdef VBOX
    gen_check_external_event();
#endif /* VBOX */
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
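/* Emitted from gen_jmp_im() above: polls env->interrupt_request inline and
   branches around the helper call unless one of the external-event bits is
   pending, so the common case costs one load, one AND and a not-taken
   branch. */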
static void gen_check_external_event(void)
{
    int skip_label;

    skip_label = gen_new_label();
    tcg_gen_ld32u_tl(cpu_tmp0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, skip_label);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
}

#ifndef VBOX
static inline void gen_update_eip(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_update_eip(target_ulong pc)
#endif /* VBOX */
{
    gen_jmp_im(pc);

}
#endif

#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

#ifndef VBOX
static inline void gen_op_movl_T0_Dshift(int ot)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
#endif /* VBOX */
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

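/* gen_check_io() emits up to two runtime checks in front of an I/O insn:
   the TSS permission-bitmap check when CPL > IOPL (or in vm86 mode), and
   the SVM intercept check when running under SVM (HF_SVMI_MASK).  Both can
   raise exceptions, so cc_op and EIP are synced to the CPU state first. */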
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}

#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* compute eflags.C to reg */
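/* cc_op indexes cc_table[]; the shift is log2(sizeof(CCTable)) -- two
   function pointers, i.e. 3 on 32-bit hosts and 4 on 64-bit hosts -- and
   the fetched compute_c (or compute_all) pointer is then called indirectly
   through tcg_gen_call() as a pure, flag-reading helper. */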
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

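    /* cc_op - CC_OP_SUBB encodes the operand size (0=byte, 1=word, 2=long,
       3=quad); the fast paths below mask or sign-extend cc_dst/cc_src to
       that width and branch on a native TCG condition instead of
       materializing EFLAGS. */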
    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
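/* The returned label l2 exits the string loop to next_eip: the GEN_REPZ
   expansions below jump to it both when ECX reaches zero and, for the
   REPZ/REPNZ forms of scas/cmps, when the ZF condition fails. */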

#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind : we generate jumps to current or next
   instruction */
#ifndef VBOX
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

#ifndef VBOX
#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ2(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, \
                                 target_ulong next_eip, \
                                 int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
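
/* Each expansion above defines a gen_repz_<op>() (e.g. gen_repz_movs) that
   emits: ECX test / one string-op body / ECX decrement / optional re-test /
   jump back to cur_eip -- so one translated block executes one iteration.
   GEN_REPZ2 additionally re-evaluates ZF for the REPZ/REPNZ forms. */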

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
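        /* At runtime cc_op becomes CC_OP_ADDB + ot + 4*carry, landing in
           the CC_OP_ADC row exactly when the incoming carry was set (the
           OP_SBBL case below does the same with CC_OP_SUBB); the translator
           cannot know the carry, hence cc_op stays CC_OP_DYNAMIC here. */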
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

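    /* cpu_tmp5 = count - 1: shifting by it first leaves, in cpu_T3, the
       last bit that will be shifted out; cpu_T3 is handed to the flag
       machinery below as cc_src so CF (and OF) can be reconstructed. */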
    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

#ifndef VBOX
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#else /* VBOX */
DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#endif /* VBOX */
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);
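    /* The rotation was composed as (x >> n) | (x << (width - n)) (mirror
       image for left rotates); the zero-count case was branched around
       above because a TCG shift by the full operand width is undefined. */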

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}

static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

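            /* For 16-bit SHRD the two operands were just packed into one
               32-bit value (t1:t0), so shift counts up to 31 naturally pull
               t1's bits in, matching the Intel count > 16 behaviour. */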
2123 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2124
2125 /* only needed if count > 16, but a test would complicate */
2126 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2127 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2128
2129 tcg_gen_shr_tl(t0, t0, t2);
2130
2131 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2132 } else {
2133 /* XXX: not optimal */
2134 tcg_gen_andi_tl(t0, t0, 0xffff);
2135 tcg_gen_shli_tl(t1, t1, 16);
2136 tcg_gen_or_tl(t1, t1, t0);
2137 tcg_gen_ext32u_tl(t1, t1);
2138
2139 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2140 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2141 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2142 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2143
2144 tcg_gen_shl_tl(t0, t0, t2);
2145 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2146 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2147 tcg_gen_or_tl(t0, t0, t1);
2148 }
2149 } else {
2150 data_bits = 8 << ot;
2151 if (is_right) {
2152 if (ot == OT_LONG)
2153 tcg_gen_ext32u_tl(t0, t0);
2154
2155 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2156
2157 tcg_gen_shr_tl(t0, t0, t2);
2158 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2159 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2160 tcg_gen_or_tl(t0, t0, t1);
2161
2162 } else {
2163 if (ot == OT_LONG)
2164 tcg_gen_ext32u_tl(t1, t1);
2165
2166 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2167
2168 tcg_gen_shl_tl(t0, t0, t2);
2169 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2170 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2171 tcg_gen_or_tl(t0, t0, t1);
2172 }
2173 }
2174 tcg_gen_mov_tl(t1, cpu_tmp4);
2175
2176 gen_set_label(label1);
2177 /* store */
2178 if (op1 == OR_TMP0) {
2179 gen_op_st_v(ot + s->mem_index, t0, a0);
2180 } else {
2181 gen_op_mov_reg_v(ot, op1, t0);
2182 }
2183
2184 /* update eflags */
2185 if (s->cc_op != CC_OP_DYNAMIC)
2186 gen_op_set_cc_op(s->cc_op);
2187
2188 label2 = gen_new_label();
2189 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2190
2191 tcg_gen_mov_tl(cpu_cc_src, t1);
2192 tcg_gen_mov_tl(cpu_cc_dst, t0);
2193 if (is_right) {
2194 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2195 } else {
2196 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2197 }
2198 gen_set_label(label2);
2199 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2200
2201 tcg_temp_free(t0);
2202 tcg_temp_free(t1);
2203 tcg_temp_free(t2);
2204 tcg_temp_free(a0);
2205}
2206
2207static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2208{
2209 if (s != OR_TMP1)
2210 gen_op_mov_TN_reg(ot, 1, s);
2211 switch(op) {
2212 case OP_ROL:
2213 gen_rot_rm_T1(s1, ot, d, 0);
2214 break;
2215 case OP_ROR:
2216 gen_rot_rm_T1(s1, ot, d, 1);
2217 break;
2218 case OP_SHL:
2219 case OP_SHL1:
2220 gen_shift_rm_T1(s1, ot, d, 0, 0);
2221 break;
2222 case OP_SHR:
2223 gen_shift_rm_T1(s1, ot, d, 1, 0);
2224 break;
2225 case OP_SAR:
2226 gen_shift_rm_T1(s1, ot, d, 1, 1);
2227 break;
2228 case OP_RCL:
2229 gen_rotc_rm_T1(s1, ot, d, 0);
2230 break;
2231 case OP_RCR:
2232 gen_rotc_rm_T1(s1, ot, d, 1);
2233 break;
2234 }
2235}
2236
2237static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2238{
2239 switch(op) {
2240 case OP_SHL:
2241 case OP_SHL1:
2242 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2243 break;
2244 case OP_SHR:
2245 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2246 break;
2247 case OP_SAR:
2248 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2249 break;
2250 default:
2251 /* currently not optimized */
2252 gen_op_movl_T1_im(c);
2253 gen_shift(s1, op, ot, d, OR_TMP1);
2254 break;
2255 }
2256}
2257
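/* gen_lea_modrm below decodes the ModRM byte (plus an optional SIB byte
   and displacement) into the cpu_A0 temporary. For reference, the bit
   layout is:
       modrm: mod[7:6] reg[5:3] rm[2:0]
       sib:   scale[7:6] index[5:3] base[2:0]
   A worked 32-bit example (not from this file): 8B 44 9E 08 is
   mov eax, [esi + ebx*4 + 8] -- mod=01 and rm=100 announce a SIB byte plus
   an 8-bit displacement, and SIB 0x9E gives scale=2, index=EBX, base=ESI. */
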
2258static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2259{
2260 target_long disp;
2261 int havesib;
2262 int base;
2263 int index;
2264 int scale;
2265 int opreg;
2266 int mod, rm, code, override, must_add_seg;
2267
2268 override = s->override;
2269 must_add_seg = s->addseg;
2270 if (override >= 0)
2271 must_add_seg = 1;
2272 mod = (modrm >> 6) & 3;
2273 rm = modrm & 7;
2274
2275 if (s->aflag) {
2276
2277 havesib = 0;
2278 base = rm;
2279 index = 0;
2280 scale = 0;
2281
2282 if (base == 4) {
2283 havesib = 1;
2284 code = ldub_code(s->pc++);
2285 scale = (code >> 6) & 3;
2286 index = ((code >> 3) & 7) | REX_X(s);
2287 base = (code & 7);
2288 }
2289 base |= REX_B(s);
2290
2291 switch (mod) {
2292 case 0:
2293 if ((base & 7) == 5) {
2294 base = -1;
2295 disp = (int32_t)ldl_code(s->pc);
2296 s->pc += 4;
2297 if (CODE64(s) && !havesib) {
2298 disp += s->pc + s->rip_offset;
2299 }
2300 } else {
2301 disp = 0;
2302 }
2303 break;
2304 case 1:
2305 disp = (int8_t)ldub_code(s->pc++);
2306 break;
2307 default:
2308 case 2:
2309 disp = ldl_code(s->pc);
2310 s->pc += 4;
2311 break;
2312 }
2313
2314 if (base >= 0) {
2315 /* for correct popl handling with esp */
2316 if (base == 4 && s->popl_esp_hack)
2317 disp += s->popl_esp_hack;
2318#ifdef TARGET_X86_64
2319 if (s->aflag == 2) {
2320 gen_op_movq_A0_reg(base);
2321 if (disp != 0) {
2322 gen_op_addq_A0_im(disp);
2323 }
2324 } else
2325#endif
2326 {
2327 gen_op_movl_A0_reg(base);
2328 if (disp != 0)
2329 gen_op_addl_A0_im(disp);
2330 }
2331 } else {
2332#ifdef TARGET_X86_64
2333 if (s->aflag == 2) {
2334 gen_op_movq_A0_im(disp);
2335 } else
2336#endif
2337 {
2338 gen_op_movl_A0_im(disp);
2339 }
2340 }
2341 /* index == 4 means no index register, so skip it (ESP cannot be a SIB index) */
2342 if (havesib && (index != 4 || scale != 0)) {
2343#ifdef TARGET_X86_64
2344 if (s->aflag == 2) {
2345 gen_op_addq_A0_reg_sN(scale, index);
2346 } else
2347#endif
2348 {
2349 gen_op_addl_A0_reg_sN(scale, index);
2350 }
2351 }
2352 if (must_add_seg) {
2353 if (override < 0) {
2354 if (base == R_EBP || base == R_ESP)
2355 override = R_SS;
2356 else
2357 override = R_DS;
2358 }
2359#ifdef TARGET_X86_64
2360 if (s->aflag == 2) {
2361 gen_op_addq_A0_seg(override);
2362 } else
2363#endif
2364 {
2365 gen_op_addl_A0_seg(override);
2366 }
2367 }
2368 } else {
2369 switch (mod) {
2370 case 0:
2371 if (rm == 6) {
2372 disp = lduw_code(s->pc);
2373 s->pc += 2;
2374 gen_op_movl_A0_im(disp);
2375 rm = 0; /* avoid SS override */
2376 goto no_rm;
2377 } else {
2378 disp = 0;
2379 }
2380 break;
2381 case 1:
2382 disp = (int8_t)ldub_code(s->pc++);
2383 break;
2384 default:
2385 case 2:
2386 disp = lduw_code(s->pc);
2387 s->pc += 2;
2388 break;
2389 }
2390 switch(rm) {
2391 case 0:
2392 gen_op_movl_A0_reg(R_EBX);
2393 gen_op_addl_A0_reg_sN(0, R_ESI);
2394 break;
2395 case 1:
2396 gen_op_movl_A0_reg(R_EBX);
2397 gen_op_addl_A0_reg_sN(0, R_EDI);
2398 break;
2399 case 2:
2400 gen_op_movl_A0_reg(R_EBP);
2401 gen_op_addl_A0_reg_sN(0, R_ESI);
2402 break;
2403 case 3:
2404 gen_op_movl_A0_reg(R_EBP);
2405 gen_op_addl_A0_reg_sN(0, R_EDI);
2406 break;
2407 case 4:
2408 gen_op_movl_A0_reg(R_ESI);
2409 break;
2410 case 5:
2411 gen_op_movl_A0_reg(R_EDI);
2412 break;
2413 case 6:
2414 gen_op_movl_A0_reg(R_EBP);
2415 break;
2416 default:
2417 case 7:
2418 gen_op_movl_A0_reg(R_EBX);
2419 break;
2420 }
2421 if (disp != 0)
2422 gen_op_addl_A0_im(disp);
2423 gen_op_andl_A0_ffff();
2424 no_rm:
2425 if (must_add_seg) {
2426 if (override < 0) {
2427 if (rm == 2 || rm == 3 || rm == 6)
2428 override = R_SS;
2429 else
2430 override = R_DS;
2431 }
2432 gen_op_addl_A0_seg(override);
2433 }
2434 }
2435
2436 opreg = OR_A0;
2437 disp = 0;
2438 *reg_ptr = opreg;
2439 *offset_ptr = disp;
2440}
2441
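/* gen_nop_modrm below merely advances s->pc past the ModRM operand bytes
   without generating any code; it is used for hinting opcodes (multi-byte
   NOP and friends) whose memory operand must be decoded but never
   accessed. */
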
2442static void gen_nop_modrm(DisasContext *s, int modrm)
2443{
2444 int mod, rm, base, code;
2445
2446 mod = (modrm >> 6) & 3;
2447 if (mod == 3)
2448 return;
2449 rm = modrm & 7;
2450
2451 if (s->aflag) {
2452
2453 base = rm;
2454
2455 if (base == 4) {
2456 code = ldub_code(s->pc++);
2457 base = (code & 7);
2458 }
2459
2460 switch (mod) {
2461 case 0:
2462 if (base == 5) {
2463 s->pc += 4;
2464 }
2465 break;
2466 case 1:
2467 s->pc++;
2468 break;
2469 default:
2470 case 2:
2471 s->pc += 4;
2472 break;
2473 }
2474 } else {
2475 switch (mod) {
2476 case 0:
2477 if (rm == 6) {
2478 s->pc += 2;
2479 }
2480 break;
2481 case 1:
2482 s->pc++;
2483 break;
2484 default:
2485 case 2:
2486 s->pc += 2;
2487 break;
2488 }
2489 }
2490}
2491
2492/* used for LEA and MOV AX, mem */
2493static void gen_add_A0_ds_seg(DisasContext *s)
2494{
2495 int override, must_add_seg;
2496 must_add_seg = s->addseg;
2497 /* default to DS; an explicit segment override prefix replaces it */
2498 override = R_DS;
2499 if (s->override >= 0) {
2500 override = s->override;
2501 must_add_seg = 1;
2502 }
2503
2504 if (must_add_seg) {
2505#ifdef TARGET_X86_64
2506 if (CODE64(s)) {
2507 gen_op_addq_A0_seg(override);
2508 } else
2509#endif
2510 {
2511 gen_op_addl_A0_seg(override);
2512 }
2513 }
2514}
2515
2516/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2517 OR_TMP0 */
2518static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2519{
2520 int mod, rm, opreg, disp;
2521
2522 mod = (modrm >> 6) & 3;
2523 rm = (modrm & 7) | REX_B(s);
2524 if (mod == 3) {
2525 if (is_store) {
2526 if (reg != OR_TMP0)
2527 gen_op_mov_TN_reg(ot, 0, reg);
2528 gen_op_mov_reg_T0(ot, rm);
2529 } else {
2530 gen_op_mov_TN_reg(ot, 0, rm);
2531 if (reg != OR_TMP0)
2532 gen_op_mov_reg_T0(ot, reg);
2533 }
2534 } else {
2535 gen_lea_modrm(s, modrm, &opreg, &disp);
2536 if (is_store) {
2537 if (reg != OR_TMP0)
2538 gen_op_mov_TN_reg(ot, 0, reg);
2539 gen_op_st_T0_A0(ot + s->mem_index);
2540 } else {
2541 gen_op_ld_T0_A0(ot + s->mem_index);
2542 if (reg != OR_TMP0)
2543 gen_op_mov_reg_T0(ot, reg);
2544 }
2545 }
2546}
2547
2548#ifndef VBOX
2549static inline uint32_t insn_get(DisasContext *s, int ot)
2550#else /* VBOX */
2551DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2552#endif /* VBOX */
2553{
2554 uint32_t ret;
2555
2556 switch(ot) {
2557 case OT_BYTE:
2558 ret = ldub_code(s->pc);
2559 s->pc++;
2560 break;
2561 case OT_WORD:
2562 ret = lduw_code(s->pc);
2563 s->pc += 2;
2564 break;
2565 default:
2566 case OT_LONG:
2567 ret = ldl_code(s->pc);
2568 s->pc += 4;
2569 break;
2570 }
2571 return ret;
2572}
2573
2574#ifndef VBOX
2575static inline int insn_const_size(unsigned int ot)
2576#else /* VBOX */
2577DECLINLINE(int) insn_const_size(unsigned int ot)
2578#endif /* VBOX */
2579{
2580 if (ot <= OT_LONG)
2581 return 1 << ot;
2582 else
2583 return 4;
2584}
2585
2586#ifndef VBOX
2587static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2588#else /* VBOX */
2589DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2590#endif /* VBOX */
2591{
2592 TranslationBlock *tb;
2593 target_ulong pc;
2594
2595 pc = s->cs_base + eip;
2596 tb = s->tb;
2597 /* NOTE: we handle the case where the TB spans two pages here */
2598 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2599 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2600 /* jump to same page: we can use a direct jump */
2601 tcg_gen_goto_tb(tb_num);
2602 gen_jmp_im(eip);
2603 tcg_gen_exit_tb((long)tb + tb_num);
2604 } else {
2605 /* jump to another page: currently not optimized */
2606 gen_jmp_im(eip);
2607 gen_eob(s);
2608 }
2609}
2610
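/* Note on gen_goto_tb above: tcg_gen_goto_tb() emits a patchable direct
   jump that the TB linking code can later redirect straight into the
   target block, and tcg_gen_exit_tb((long)tb + tb_num) returns to the
   execution loop with the jump slot number encoded in the low bits of the
   TB pointer, so the caller knows which of the two exits to chain. */
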
2611#ifndef VBOX
2612static inline void gen_jcc(DisasContext *s, int b,
2613#else /* VBOX */
2614DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2615#endif /* VBOX */
2616 target_ulong val, target_ulong next_eip)
2617{
2618 int l1, l2, cc_op;
2619
2620 cc_op = s->cc_op;
2621 if (s->cc_op != CC_OP_DYNAMIC) {
2622 gen_op_set_cc_op(s->cc_op);
2623 s->cc_op = CC_OP_DYNAMIC;
2624 }
2625 if (s->jmp_opt) {
2626#ifdef VBOX
2627 gen_check_external_event(s);
2628#endif /* VBOX */
2629 l1 = gen_new_label();
2630 gen_jcc1(s, cc_op, b, l1);
2631
2632 gen_goto_tb(s, 0, next_eip);
2633
2634 gen_set_label(l1);
2635 gen_goto_tb(s, 1, val);
2636 s->is_jmp = 3;
2637 } else {
2638
2639 l1 = gen_new_label();
2640 l2 = gen_new_label();
2641 gen_jcc1(s, cc_op, b, l1);
2642
2643 gen_jmp_im(next_eip);
2644 tcg_gen_br(l2);
2645
2646 gen_set_label(l1);
2647 gen_jmp_im(val);
2648 gen_set_label(l2);
2649 gen_eob(s);
2650 }
2651}
2652
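/* gen_jcc above produces two shapes: with jmp_opt it emits a conditional
   branch to one of two gen_goto_tb() exits (fall-through and taken), both
   eligible for TB chaining; otherwise it stores the chosen EIP on either
   path and funnels into a single gen_eob(). */
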
2653static void gen_setcc(DisasContext *s, int b)
2654{
2655 int inv, jcc_op, l1;
2656 TCGv t0;
2657
2658 if (is_fast_jcc_case(s, b)) {
2659 /* nominal case: we use a jump */
2660 /* XXX: make it faster by adding new instructions in TCG */
2661 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2662 tcg_gen_movi_tl(t0, 0);
2663 l1 = gen_new_label();
2664 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2665 tcg_gen_movi_tl(t0, 1);
2666 gen_set_label(l1);
2667 tcg_gen_mov_tl(cpu_T[0], t0);
2668 tcg_temp_free(t0);
2669 } else {
2670 /* slow case: it is more efficient not to generate a jump,
2671 although it is questionable whether this optimization is
2672 worthwhile */
2673 inv = b & 1;
2674 jcc_op = (b >> 1) & 7;
2675 gen_setcc_slow_T0(s, jcc_op);
2676 if (inv) {
2677 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2678 }
2679 }
2680}
2681
2682#ifndef VBOX
2683static inline void gen_op_movl_T0_seg(int seg_reg)
2684#else /* VBOX */
2685DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2686#endif /* VBOX */
2687{
2688 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2689 offsetof(CPUX86State,segs[seg_reg].selector));
2690}
2691
2692#ifndef VBOX
2693static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2694#else /* VBOX */
2695DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2696#endif /* VBOX */
2697{
2698 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2699 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2700 offsetof(CPUX86State,segs[seg_reg].selector));
2701 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2702 tcg_gen_st_tl(cpu_T[0], cpu_env,
2703 offsetof(CPUX86State,segs[seg_reg].base));
2704#ifdef VBOX
2705 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2706 if (seg_reg == R_CS)
2707 flags |= DESC_CS_MASK;
2708 gen_op_movl_T0_im(flags);
2709 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2710#endif
2711}
2712
2713 /* move T0 to seg_reg and check whether the CPU state may change. Never
2714 call this function with seg_reg == R_CS */
2715static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2716{
2717 if (s->pe && !s->vm86) {
2718 /* XXX: optimize by finding processor state dynamically */
2719 if (s->cc_op != CC_OP_DYNAMIC)
2720 gen_op_set_cc_op(s->cc_op);
2721 gen_jmp_im(cur_eip);
2722 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2723 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2724 /* abort translation because the addseg value may change or
2725 because ss32 may change. For R_SS, translation must always
2726 stop, as special handling is needed to disable hardware
2727 interrupts for the next instruction */
2728 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2729 s->is_jmp = 3;
2730 } else {
2731 gen_op_movl_seg_T0_vm(seg_reg);
2732 if (seg_reg == R_SS)
2733 s->is_jmp = 3;
2734 }
2735}
2736
2737#ifndef VBOX
2738static inline int svm_is_rep(int prefixes)
2739#else /* VBOX */
2740DECLINLINE(int) svm_is_rep(int prefixes)
2741#endif /* VBOX */
2742{
2743 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2744}
2745
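/* The 8 returned by svm_is_rep() lines up with the REP bit (bit 3) of the
   SVM IOIO intercept exit information, so it can be OR'ed directly into
   the intercept parameter by the callers. */
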
2746#ifndef VBOX
2747static inline void
2748#else /* VBOX */
2749DECLINLINE(void)
2750#endif /* VBOX */
2751gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2752 uint32_t type, uint64_t param)
2753{
2754 /* no SVM activated; fast case */
2755 if (likely(!(s->flags & HF_SVMI_MASK)))
2756 return;
2757 if (s->cc_op != CC_OP_DYNAMIC)
2758 gen_op_set_cc_op(s->cc_op);
2759 gen_jmp_im(pc_start - s->cs_base);
2760 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2761 tcg_const_i32(type), tcg_const_i64(param));
2762}
2763
2764#ifndef VBOX
2765static inline void
2766#else /* VBOX */
2767DECLINLINE(void)
2768#endif
2769gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2770{
2771 gen_svm_check_intercept_param(s, pc_start, type, 0);
2772}
2773
2774#ifndef VBOX
2775static inline void gen_stack_update(DisasContext *s, int addend)
2776#else /* VBOX */
2777DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2778#endif /* VBOX */
2779{
2780#ifdef TARGET_X86_64
2781 if (CODE64(s)) {
2782 gen_op_add_reg_im(2, R_ESP, addend);
2783 } else
2784#endif
2785 if (s->ss32) {
2786 gen_op_add_reg_im(1, R_ESP, addend);
2787 } else {
2788 gen_op_add_reg_im(0, R_ESP, addend);
2789 }
2790}
2791
2792/* generate a push. It depends on ss32, addseg and dflag */
2793static void gen_push_T0(DisasContext *s)
2794{
2795#ifdef TARGET_X86_64
2796 if (CODE64(s)) {
2797 gen_op_movq_A0_reg(R_ESP);
2798 if (s->dflag) {
2799 gen_op_addq_A0_im(-8);
2800 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2801 } else {
2802 gen_op_addq_A0_im(-2);
2803 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2804 }
2805 gen_op_mov_reg_A0(2, R_ESP);
2806 } else
2807#endif
2808 {
2809 gen_op_movl_A0_reg(R_ESP);
2810 if (!s->dflag)
2811 gen_op_addl_A0_im(-2);
2812 else
2813 gen_op_addl_A0_im(-4);
2814 if (s->ss32) {
2815 if (s->addseg) {
2816 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2817 gen_op_addl_A0_seg(R_SS);
2818 }
2819 } else {
2820 gen_op_andl_A0_ffff();
2821 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2822 gen_op_addl_A0_seg(R_SS);
2823 }
2824 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2825 if (s->ss32 && !s->addseg)
2826 gen_op_mov_reg_A0(1, R_ESP);
2827 else
2828 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2829 }
2830}
2831
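/* For the common flat 32-bit case (ss32 && !addseg, dflag == 1),
   gen_push_T0 above reduces to: A0 = ESP - 4; [A0] = T0; ESP = A0. The
   extra copy into T1 on the addseg and 16-bit paths preserves the
   unsegmented stack pointer, since A0 has the SS base added to it before
   the store. */
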
2832/* generate a push. It depends on ss32, addseg and dflag */
2833/* slower version for T1, only used for call Ev */
2834static void gen_push_T1(DisasContext *s)
2835{
2836#ifdef TARGET_X86_64
2837 if (CODE64(s)) {
2838 gen_op_movq_A0_reg(R_ESP);
2839 if (s->dflag) {
2840 gen_op_addq_A0_im(-8);
2841 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2842 } else {
2843 gen_op_addq_A0_im(-2);
2844 gen_op_st_T1_A0(OT_WORD + s->mem_index); /* push T1, not T0 */
2845 }
2846 gen_op_mov_reg_A0(2, R_ESP);
2847 } else
2848#endif
2849 {
2850 gen_op_movl_A0_reg(R_ESP);
2851 if (!s->dflag)
2852 gen_op_addl_A0_im(-2);
2853 else
2854 gen_op_addl_A0_im(-4);
2855 if (s->ss32) {
2856 if (s->addseg) {
2857 gen_op_addl_A0_seg(R_SS);
2858 }
2859 } else {
2860 gen_op_andl_A0_ffff();
2861 gen_op_addl_A0_seg(R_SS);
2862 }
2863 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2864
2865 if (s->ss32 && !s->addseg)
2866 gen_op_mov_reg_A0(1, R_ESP);
2867 else
2868 gen_stack_update(s, (-2) << s->dflag);
2869 }
2870}
2871
2872 /* a two-step pop is necessary for precise exceptions */
2873static void gen_pop_T0(DisasContext *s)
2874{
2875#ifdef TARGET_X86_64
2876 if (CODE64(s)) {
2877 gen_op_movq_A0_reg(R_ESP);
2878 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2879 } else
2880#endif
2881 {
2882 gen_op_movl_A0_reg(R_ESP);
2883 if (s->ss32) {
2884 if (s->addseg)
2885 gen_op_addl_A0_seg(R_SS);
2886 } else {
2887 gen_op_andl_A0_ffff();
2888 gen_op_addl_A0_seg(R_SS);
2889 }
2890 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2891 }
2892}
2893
2894static void gen_pop_update(DisasContext *s)
2895{
2896#ifdef TARGET_X86_64
2897 if (CODE64(s) && s->dflag) {
2898 gen_stack_update(s, 8);
2899 } else
2900#endif
2901 {
2902 gen_stack_update(s, 2 << s->dflag);
2903 }
2904}
2905
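/* Splitting pop into a load (gen_pop_T0) and a separate ESP adjustment
   (gen_pop_update) means a faulting load leaves ESP untouched, so the
   instruction can simply be restarted after the fault -- the "precise
   exceptions" mentioned above. */
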
2906static void gen_stack_A0(DisasContext *s)
2907{
2908 gen_op_movl_A0_reg(R_ESP);
2909 if (!s->ss32)
2910 gen_op_andl_A0_ffff();
2911 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2912 if (s->addseg)
2913 gen_op_addl_A0_seg(R_SS);
2914}
2915
2916 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2917static void gen_pusha(DisasContext *s)
2918{
2919 int i;
2920 gen_op_movl_A0_reg(R_ESP);
2921 gen_op_addl_A0_im(-16 << s->dflag);
2922 if (!s->ss32)
2923 gen_op_andl_A0_ffff();
2924 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2925 if (s->addseg)
2926 gen_op_addl_A0_seg(R_SS);
2927 for(i = 0; i < 8; i++) {
2928 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2929 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2930 gen_op_addl_A0_im(2 << s->dflag);
2931 }
2932 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2933}
2934
2935 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2936static void gen_popa(DisasContext *s)
2937{
2938 int i;
2939 gen_op_movl_A0_reg(R_ESP);
2940 if (!s->ss32)
2941 gen_op_andl_A0_ffff();
2942 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2943 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2944 if (s->addseg)
2945 gen_op_addl_A0_seg(R_SS);
2946 for(i = 0; i < 8; i++) {
2947 /* ESP is not reloaded */
2948 if (i != 3) {
2949 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2950 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2951 }
2952 gen_op_addl_A0_im(2 << s->dflag);
2953 }
2954 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2955}
2956
2957static void gen_enter(DisasContext *s, int esp_addend, int level)
2958{
2959 int ot, opsize;
2960
2961 level &= 0x1f;
2962#ifdef TARGET_X86_64
2963 if (CODE64(s)) {
2964 ot = s->dflag ? OT_QUAD : OT_WORD;
2965 opsize = 1 << ot;
2966
2967 gen_op_movl_A0_reg(R_ESP);
2968 gen_op_addq_A0_im(-opsize);
2969 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2970
2971 /* push bp */
2972 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2973 gen_op_st_T0_A0(ot + s->mem_index);
2974 if (level) {
2975 /* XXX: must save state */
2976 tcg_gen_helper_0_3(helper_enter64_level,
2977 tcg_const_i32(level),
2978 tcg_const_i32((ot == OT_QUAD)),
2979 cpu_T[1]);
2980 }
2981 gen_op_mov_reg_T1(ot, R_EBP);
2982 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2983 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2984 } else
2985#endif
2986 {
2987 ot = s->dflag + OT_WORD;
2988 opsize = 2 << s->dflag;
2989
2990 gen_op_movl_A0_reg(R_ESP);
2991 gen_op_addl_A0_im(-opsize);
2992 if (!s->ss32)
2993 gen_op_andl_A0_ffff();
2994 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2995 if (s->addseg)
2996 gen_op_addl_A0_seg(R_SS);
2997 /* push bp */
2998 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2999 gen_op_st_T0_A0(ot + s->mem_index);
3000 if (level) {
3001 /* XXX: must save state */
3002 tcg_gen_helper_0_3(helper_enter_level,
3003 tcg_const_i32(level),
3004 tcg_const_i32(s->dflag),
3005 cpu_T[1]);
3006 }
3007 gen_op_mov_reg_T1(ot, R_EBP);
3008 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3009 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3010 }
3011}
3012
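/* gen_enter above follows the IA-32 ENTER pseudo-code (level is already
   masked to 0..31):
       push EBP
       frame = ESP
       if (level) helper_enter_level() performs the nested-frame copies
       EBP = frame
       ESP = frame - esp_addend - opsize * level
*/
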
3013static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3014{
3015 if (s->cc_op != CC_OP_DYNAMIC)
3016 gen_op_set_cc_op(s->cc_op);
3017 gen_jmp_im(cur_eip);
3018 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3019 s->is_jmp = 3;
3020}
3021
3022/* an interrupt is different from an exception because of the
3023 privilege checks */
3024static void gen_interrupt(DisasContext *s, int intno,
3025 target_ulong cur_eip, target_ulong next_eip)
3026{
3027 if (s->cc_op != CC_OP_DYNAMIC)
3028 gen_op_set_cc_op(s->cc_op);
3029 gen_jmp_im(cur_eip);
3030 tcg_gen_helper_0_2(helper_raise_interrupt,
3031 tcg_const_i32(intno),
3032 tcg_const_i32(next_eip - cur_eip));
3033 s->is_jmp = 3;
3034}
3035
3036static void gen_debug(DisasContext *s, target_ulong cur_eip)
3037{
3038 if (s->cc_op != CC_OP_DYNAMIC)
3039 gen_op_set_cc_op(s->cc_op);
3040 gen_jmp_im(cur_eip);
3041 tcg_gen_helper_0_0(helper_debug);
3042 s->is_jmp = 3;
3043}
3044
3045 /* generate a generic end of block; a trace exception is also
3046 generated if needed */
3047static void gen_eob(DisasContext *s)
3048{
3049 if (s->cc_op != CC_OP_DYNAMIC)
3050 gen_op_set_cc_op(s->cc_op);
3051 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3052 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3053 }
3054 if (s->singlestep_enabled) {
3055 tcg_gen_helper_0_0(helper_debug);
3056 } else if (s->tf) {
3057 tcg_gen_helper_0_0(helper_single_step);
3058 } else {
3059 tcg_gen_exit_tb(0);
3060 }
3061 s->is_jmp = 3;
3062}
3063
3064 /* generate a jump to eip. No segment change must happen before this,
3065 as a direct jump to the next block may occur */
3066static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3067{
3068 if (s->jmp_opt) {
3069#ifdef VBOX
3070 gen_check_external_event(s);
3071#endif /* VBOX */
3072 if (s->cc_op != CC_OP_DYNAMIC) {
3073 gen_op_set_cc_op(s->cc_op);
3074 s->cc_op = CC_OP_DYNAMIC;
3075 }
3076 gen_goto_tb(s, tb_num, eip);
3077 s->is_jmp = 3;
3078 } else {
3079 gen_jmp_im(eip);
3080 gen_eob(s);
3081 }
3082}
3083
3084static void gen_jmp(DisasContext *s, target_ulong eip)
3085{
3086 gen_jmp_tb(s, eip, 0);
3087}
3088
3089#ifndef VBOX
3090static inline void gen_ldq_env_A0(int idx, int offset)
3091#else /* VBOX */
3092DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3093#endif /* VBOX */
3094{
3095 int mem_index = (idx >> 2) - 1;
3096 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3097 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3098}
3099
3100#ifndef VBOX
3101static inline void gen_stq_env_A0(int idx, int offset)
3102#else /* VBOX */
3103DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3104#endif /* VBOX */
3105{
3106 int mem_index = (idx >> 2) - 1;
3107 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3108 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3109}
3110
3111#ifndef VBOX
3112static inline void gen_ldo_env_A0(int idx, int offset)
3113#else /* VBOX */
3114DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3115#endif /* VBOX */
3116{
3117 int mem_index = (idx >> 2) - 1;
3118 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3119 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3120 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3121 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3122 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3123}
3124
3125#ifndef VBOX
3126static inline void gen_sto_env_A0(int idx, int offset)
3127#else /* VBOX */
3128DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3129#endif /* VBOX */
3130{
3131 int mem_index = (idx >> 2) - 1;
3132 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3133 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3134 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3135 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3136 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3137}
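/* In the four load/store helpers above, 'idx' is an 'ot + s->mem_index'
   style value; '(idx >> 2) - 1' recovers the MMU index expected by
   tcg_gen_qemu_ld/st, which works because s->mem_index is stored
   pre-multiplied by 4, leaving the low two bits free for the operand
   size. */
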
3138
3139#ifndef VBOX
3140static inline void gen_op_movo(int d_offset, int s_offset)
3141#else /* VBOX */
3142DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3143#endif /* VBOX */
3144{
3145 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3146 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3147 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3148 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3149}
3150
3151#ifndef VBOX
3152static inline void gen_op_movq(int d_offset, int s_offset)
3153#else /* VBOX */
3154DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3155#endif /* VBOX */
3156{
3157 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3158 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3159}
3160
3161#ifndef VBOX
3162static inline void gen_op_movl(int d_offset, int s_offset)
3163#else /* VBOX */
3164DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3165#endif /* VBOX */
3166{
3167 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3168 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3169}
3170
3171#ifndef VBOX
3172static inline void gen_op_movq_env_0(int d_offset)
3173#else /* VBOX */
3174DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3175#endif /* VBOX */
3176{
3177 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3178 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3179}
3180
3181#define SSE_SPECIAL ((void *)1)
3182#define SSE_DUMMY ((void *)2)
3183
3184#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3185#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3186 helper_ ## x ## ss, helper_ ## x ## sd, }
3187
3188static void *sse_op_table1[256][4] = {
3189 /* 3DNow! extensions */
3190 [0x0e] = { SSE_DUMMY }, /* femms */
3191 [0x0f] = { SSE_DUMMY }, /* pf... */
3192 /* pure SSE operations */
3193 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3194 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3195 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3196 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3197 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3198 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3199 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3200 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3201
3202 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3203 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3204 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3205 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3206 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3207 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3208 [0x2e] = { helper_ucomiss, helper_ucomisd },
3209 [0x2f] = { helper_comiss, helper_comisd },
3210 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3211 [0x51] = SSE_FOP(sqrt),
3212 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3213 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3214 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3215 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3216 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3217 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3218 [0x58] = SSE_FOP(add),
3219 [0x59] = SSE_FOP(mul),
3220 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3221 helper_cvtss2sd, helper_cvtsd2ss },
3222 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3223 [0x5c] = SSE_FOP(sub),
3224 [0x5d] = SSE_FOP(min),
3225 [0x5e] = SSE_FOP(div),
3226 [0x5f] = SSE_FOP(max),
3227
3228 [0xc2] = SSE_FOP(cmpeq),
3229 [0xc6] = { helper_shufps, helper_shufpd },
3230
3231 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3232 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3233
3234 /* MMX ops and their SSE extensions */
3235 [0x60] = MMX_OP2(punpcklbw),
3236 [0x61] = MMX_OP2(punpcklwd),
3237 [0x62] = MMX_OP2(punpckldq),
3238 [0x63] = MMX_OP2(packsswb),
3239 [0x64] = MMX_OP2(pcmpgtb),
3240 [0x65] = MMX_OP2(pcmpgtw),
3241 [0x66] = MMX_OP2(pcmpgtl),
3242 [0x67] = MMX_OP2(packuswb),
3243 [0x68] = MMX_OP2(punpckhbw),
3244 [0x69] = MMX_OP2(punpckhwd),
3245 [0x6a] = MMX_OP2(punpckhdq),
3246 [0x6b] = MMX_OP2(packssdw),
3247 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3248 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3249 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3250 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3251 [0x70] = { helper_pshufw_mmx,
3252 helper_pshufd_xmm,
3253 helper_pshufhw_xmm,
3254 helper_pshuflw_xmm },
3255 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3256 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3257 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3258 [0x74] = MMX_OP2(pcmpeqb),
3259 [0x75] = MMX_OP2(pcmpeqw),
3260 [0x76] = MMX_OP2(pcmpeql),
3261 [0x77] = { SSE_DUMMY }, /* emms */
3262 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3263 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3264 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3265 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3266 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3267 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3268 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3269 [0xd1] = MMX_OP2(psrlw),
3270 [0xd2] = MMX_OP2(psrld),
3271 [0xd3] = MMX_OP2(psrlq),
3272 [0xd4] = MMX_OP2(paddq),
3273 [0xd5] = MMX_OP2(pmullw),
3274 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
3275 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3276 [0xd8] = MMX_OP2(psubusb),
3277 [0xd9] = MMX_OP2(psubusw),
3278 [0xda] = MMX_OP2(pminub),
3279 [0xdb] = MMX_OP2(pand),
3280 [0xdc] = MMX_OP2(paddusb),
3281 [0xdd] = MMX_OP2(paddusw),
3282 [0xde] = MMX_OP2(pmaxub),
3283 [0xdf] = MMX_OP2(pandn),
3284 [0xe0] = MMX_OP2(pavgb),
3285 [0xe1] = MMX_OP2(psraw),
3286 [0xe2] = MMX_OP2(psrad),
3287 [0xe3] = MMX_OP2(pavgw),
3288 [0xe4] = MMX_OP2(pmulhuw),
3289 [0xe5] = MMX_OP2(pmulhw),
3290 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3291 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3292 [0xe8] = MMX_OP2(psubsb),
3293 [0xe9] = MMX_OP2(psubsw),
3294 [0xea] = MMX_OP2(pminsw),
3295 [0xeb] = MMX_OP2(por),
3296 [0xec] = MMX_OP2(paddsb),
3297 [0xed] = MMX_OP2(paddsw),
3298 [0xee] = MMX_OP2(pmaxsw),
3299 [0xef] = MMX_OP2(pxor),
3300 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3301 [0xf1] = MMX_OP2(psllw),
3302 [0xf2] = MMX_OP2(pslld),
3303 [0xf3] = MMX_OP2(psllq),
3304 [0xf4] = MMX_OP2(pmuludq),
3305 [0xf5] = MMX_OP2(pmaddwd),
3306 [0xf6] = MMX_OP2(psadbw),
3307 [0xf7] = MMX_OP2(maskmov),
3308 [0xf8] = MMX_OP2(psubb),
3309 [0xf9] = MMX_OP2(psubw),
3310 [0xfa] = MMX_OP2(psubl),
3311 [0xfb] = MMX_OP2(psubq),
3312 [0xfc] = MMX_OP2(paddb),
3313 [0xfd] = MMX_OP2(paddw),
3314 [0xfe] = MMX_OP2(paddl),
3315};
3316
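/* The four columns of sse_op_table1 are selected by the mandatory prefix:
   index 0 for no prefix, 1 for 0x66, 2 for 0xF3, 3 for 0xF2 (see the b1
   computation in gen_sse() below). SSE_SPECIAL marks encodings handled by
   dedicated code instead of a generic two-operand helper, and SSE_DUMMY
   marks opcodes handled before the table dispatch (femms/emms). */
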
3317static void *sse_op_table2[3 * 8][2] = {
3318 [0 + 2] = MMX_OP2(psrlw),
3319 [0 + 4] = MMX_OP2(psraw),
3320 [0 + 6] = MMX_OP2(psllw),
3321 [8 + 2] = MMX_OP2(psrld),
3322 [8 + 4] = MMX_OP2(psrad),
3323 [8 + 6] = MMX_OP2(pslld),
3324 [16 + 2] = MMX_OP2(psrlq),
3325 [16 + 3] = { NULL, helper_psrldq_xmm },
3326 [16 + 6] = MMX_OP2(psllq),
3327 [16 + 7] = { NULL, helper_pslldq_xmm },
3328};
3329
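/* sse_op_table2 is indexed as ((b - 1) & 3) * 8 + reg: opcode 0x71/0x72/
   0x73 selects the word/dword/qword row, and the ModRM reg field picks the
   operation (2 = shift right logical, 4 = shift right arithmetic,
   6 = shift left; slots 3 and 7 are the 128-bit psrldq/pslldq and thus
   xmm-only). */
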
3330static void *sse_op_table3[4 * 3] = {
3331 helper_cvtsi2ss,
3332 helper_cvtsi2sd,
3333 X86_64_ONLY(helper_cvtsq2ss),
3334 X86_64_ONLY(helper_cvtsq2sd),
3335
3336 helper_cvttss2si,
3337 helper_cvttsd2si,
3338 X86_64_ONLY(helper_cvttss2sq),
3339 X86_64_ONLY(helper_cvttsd2sq),
3340
3341 helper_cvtss2si,
3342 helper_cvtsd2si,
3343 X86_64_ONLY(helper_cvtss2sq),
3344 X86_64_ONLY(helper_cvtsd2sq),
3345};
3346
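/* sse_op_table3 holds the scalar int<->float conversions in three groups
   of four entries ({ss,sd} x {si,sq}): cvtsi2*, then the truncating
   cvtt*2si/sq, then the rounding cvt*2si/sq. gen_sse() indexes it with
   (s->dflag == 2) * 2 + ((b >> 8) - 2), adding 4 for the truncating and
   8 for the rounding integer-result forms. */
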
3347static void *sse_op_table4[8][4] = {
3348 SSE_FOP(cmpeq),
3349 SSE_FOP(cmplt),
3350 SSE_FOP(cmple),
3351 SSE_FOP(cmpunord),
3352 SSE_FOP(cmpneq),
3353 SSE_FOP(cmpnlt),
3354 SSE_FOP(cmpnle),
3355 SSE_FOP(cmpord),
3356};
3357
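/* sse_op_table4 maps the CMPPS/CMPPD/CMPSS/CMPSD imm8 predicate (0..7:
   eq, lt, le, unord, neq, nlt, nle, ord) to the corresponding helper
   quadruple. */
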
3358static void *sse_op_table5[256] = {
3359 [0x0c] = helper_pi2fw,
3360 [0x0d] = helper_pi2fd,
3361 [0x1c] = helper_pf2iw,
3362 [0x1d] = helper_pf2id,
3363 [0x8a] = helper_pfnacc,
3364 [0x8e] = helper_pfpnacc,
3365 [0x90] = helper_pfcmpge,
3366 [0x94] = helper_pfmin,
3367 [0x96] = helper_pfrcp,
3368 [0x97] = helper_pfrsqrt,
3369 [0x9a] = helper_pfsub,
3370 [0x9e] = helper_pfadd,
3371 [0xa0] = helper_pfcmpgt,
3372 [0xa4] = helper_pfmax,
3373 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3374 [0xa7] = helper_movq, /* pfrsqit1 */
3375 [0xaa] = helper_pfsubr,
3376 [0xae] = helper_pfacc,
3377 [0xb0] = helper_pfcmpeq,
3378 [0xb4] = helper_pfmul,
3379 [0xb6] = helper_movq, /* pfrcpit2 */
3380 [0xb7] = helper_pmulhrw_mmx,
3381 [0xbb] = helper_pswapd,
3382 [0xbf] = helper_pavgb_mmx /* pavgusb */
3383};
3384
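/* sse_op_table5 is indexed by the 3DNow! suffix byte that trails the
   0x0F 0x0F encoding's operands; the entries mapped to helper_movq fall
   back to a plain move where the extra precision is not modelled. */
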
3385struct sse_op_helper_s {
3386 void *op[2]; uint32_t ext_mask;
3387};
3388#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3389#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3390#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3391#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3392static struct sse_op_helper_s sse_op_table6[256] = {
3393 [0x00] = SSSE3_OP(pshufb),
3394 [0x01] = SSSE3_OP(phaddw),
3395 [0x02] = SSSE3_OP(phaddd),
3396 [0x03] = SSSE3_OP(phaddsw),
3397 [0x04] = SSSE3_OP(pmaddubsw),
3398 [0x05] = SSSE3_OP(phsubw),
3399 [0x06] = SSSE3_OP(phsubd),
3400 [0x07] = SSSE3_OP(phsubsw),
3401 [0x08] = SSSE3_OP(psignb),
3402 [0x09] = SSSE3_OP(psignw),
3403 [0x0a] = SSSE3_OP(psignd),
3404 [0x0b] = SSSE3_OP(pmulhrsw),
3405 [0x10] = SSE41_OP(pblendvb),
3406 [0x14] = SSE41_OP(blendvps),
3407 [0x15] = SSE41_OP(blendvpd),
3408 [0x17] = SSE41_OP(ptest),
3409 [0x1c] = SSSE3_OP(pabsb),
3410 [0x1d] = SSSE3_OP(pabsw),
3411 [0x1e] = SSSE3_OP(pabsd),
3412 [0x20] = SSE41_OP(pmovsxbw),
3413 [0x21] = SSE41_OP(pmovsxbd),
3414 [0x22] = SSE41_OP(pmovsxbq),
3415 [0x23] = SSE41_OP(pmovsxwd),
3416 [0x24] = SSE41_OP(pmovsxwq),
3417 [0x25] = SSE41_OP(pmovsxdq),
3418 [0x28] = SSE41_OP(pmuldq),
3419 [0x29] = SSE41_OP(pcmpeqq),
3420 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3421 [0x2b] = SSE41_OP(packusdw),
3422 [0x30] = SSE41_OP(pmovzxbw),
3423 [0x31] = SSE41_OP(pmovzxbd),
3424 [0x32] = SSE41_OP(pmovzxbq),
3425 [0x33] = SSE41_OP(pmovzxwd),
3426 [0x34] = SSE41_OP(pmovzxwq),
3427 [0x35] = SSE41_OP(pmovzxdq),
3428 [0x37] = SSE42_OP(pcmpgtq),
3429 [0x38] = SSE41_OP(pminsb),
3430 [0x39] = SSE41_OP(pminsd),
3431 [0x3a] = SSE41_OP(pminuw),
3432 [0x3b] = SSE41_OP(pminud),
3433 [0x3c] = SSE41_OP(pmaxsb),
3434 [0x3d] = SSE41_OP(pmaxsd),
3435 [0x3e] = SSE41_OP(pmaxuw),
3436 [0x3f] = SSE41_OP(pmaxud),
3437 [0x40] = SSE41_OP(pmulld),
3438 [0x41] = SSE41_OP(phminposuw),
3439};
3440
3441static struct sse_op_helper_s sse_op_table7[256] = {
3442 [0x08] = SSE41_OP(roundps),
3443 [0x09] = SSE41_OP(roundpd),
3444 [0x0a] = SSE41_OP(roundss),
3445 [0x0b] = SSE41_OP(roundsd),
3446 [0x0c] = SSE41_OP(blendps),
3447 [0x0d] = SSE41_OP(blendpd),
3448 [0x0e] = SSE41_OP(pblendw),
3449 [0x0f] = SSSE3_OP(palignr),
3450 [0x14] = SSE41_SPECIAL, /* pextrb */
3451 [0x15] = SSE41_SPECIAL, /* pextrw */
3452 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3453 [0x17] = SSE41_SPECIAL, /* extractps */
3454 [0x20] = SSE41_SPECIAL, /* pinsrb */
3455 [0x21] = SSE41_SPECIAL, /* insertps */
3456 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3457 [0x40] = SSE41_OP(dpps),
3458 [0x41] = SSE41_OP(dppd),
3459 [0x42] = SSE41_OP(mpsadbw),
3460 [0x60] = SSE42_OP(pcmpestrm),
3461 [0x61] = SSE42_OP(pcmpestri),
3462 [0x62] = SSE42_OP(pcmpistrm),
3463 [0x63] = SSE42_OP(pcmpistri),
3464};
3465
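/* sse_op_table6/sse_op_table7 cover the three-byte 0F 38 and 0F 3A opcode
   maps: op[0] is the MMX form, op[1] the 0x66-prefixed xmm form, and
   ext_mask is tested against the guest's CPUID extended feature bits
   before the helper may be used. */
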
3466static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3467{
3468 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3469 int modrm, mod, rm, reg, reg_addr, offset_addr;
3470 void *sse_op2;
3471
3472 b &= 0xff;
3473 if (s->prefix & PREFIX_DATA)
3474 b1 = 1;
3475 else if (s->prefix & PREFIX_REPZ)
3476 b1 = 2;
3477 else if (s->prefix & PREFIX_REPNZ)
3478 b1 = 3;
3479 else
3480 b1 = 0;
3481 sse_op2 = sse_op_table1[b][b1];
3482 if (!sse_op2)
3483 goto illegal_op;
3484 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3485 is_xmm = 1;
3486 } else {
3487 if (b1 == 0) {
3488 /* MMX case */
3489 is_xmm = 0;
3490 } else {
3491 is_xmm = 1;
3492 }
3493 }
3494 /* simple MMX/SSE operation */
3495 if (s->flags & HF_TS_MASK) {
3496 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3497 return;
3498 }
3499 if (s->flags & HF_EM_MASK) {
3500 illegal_op:
3501 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3502 return;
3503 }
3504 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3505 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3506 goto illegal_op;
3507 if (b == 0x0e) {
3508 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3509 goto illegal_op;
3510 /* femms */
3511 tcg_gen_helper_0_0(helper_emms);
3512 return;
3513 }
3514 if (b == 0x77) {
3515 /* emms */
3516 tcg_gen_helper_0_0(helper_emms);
3517 return;
3518 }
3519 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3520 the static cpu state) */
3521 if (!is_xmm) {
3522 tcg_gen_helper_0_0(helper_enter_mmx);
3523 }
3524
3525 modrm = ldub_code(s->pc++);
3526 reg = ((modrm >> 3) & 7);
3527 if (is_xmm)
3528 reg |= rex_r;
3529 mod = (modrm >> 6) & 3;
3530 if (sse_op2 == SSE_SPECIAL) {
3531 b |= (b1 << 8);
3532 switch(b) {
3533 case 0x0e7: /* movntq */
3534 if (mod == 3)
3535 goto illegal_op;
3536 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3537 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3538 break;
3539 case 0x1e7: /* movntdq */
3540 case 0x02b: /* movntps */
3541 case 0x12b: /* movntpd */
3542 case 0x3f0: /* lddqu */
3543 if (mod == 3)
3544 goto illegal_op;
3545 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3546 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3547 break;
3548 case 0x6e: /* movd mm, ea */
3549#ifdef TARGET_X86_64
3550 if (s->dflag == 2) {
3551 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3552 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3553 } else
3554#endif
3555 {
3556 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3557 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3558 offsetof(CPUX86State,fpregs[reg].mmx));
3559 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3560 }
3561 break;
3562 case 0x16e: /* movd xmm, ea */
3563#ifdef TARGET_X86_64
3564 if (s->dflag == 2) {
3565 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3566 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3567 offsetof(CPUX86State,xmm_regs[reg]));
3568 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3569 } else
3570#endif
3571 {
3572 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3573 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3574 offsetof(CPUX86State,xmm_regs[reg]));
3575 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3576 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3577 }
3578 break;
3579 case 0x6f: /* movq mm, ea */
3580 if (mod != 3) {
3581 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3582 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3583 } else {
3584 rm = (modrm & 7);
3585 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3586 offsetof(CPUX86State,fpregs[rm].mmx));
3587 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3588 offsetof(CPUX86State,fpregs[reg].mmx));
3589 }
3590 break;
3591 case 0x010: /* movups */
3592 case 0x110: /* movupd */
3593 case 0x028: /* movaps */
3594 case 0x128: /* movapd */
3595 case 0x16f: /* movdqa xmm, ea */
3596 case 0x26f: /* movdqu xmm, ea */
3597 if (mod != 3) {
3598 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3599 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3600 } else {
3601 rm = (modrm & 7) | REX_B(s);
3602 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3603 offsetof(CPUX86State,xmm_regs[rm]));
3604 }
3605 break;
3606 case 0x210: /* movss xmm, ea */
3607 if (mod != 3) {
3608 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3609 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3610 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3611 gen_op_movl_T0_0();
3612 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3613 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3614 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3615 } else {
3616 rm = (modrm & 7) | REX_B(s);
3617 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3618 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3619 }
3620 break;
3621 case 0x310: /* movsd xmm, ea */
3622 if (mod != 3) {
3623 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3624 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3625 gen_op_movl_T0_0();
3626 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3627 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3628 } else {
3629 rm = (modrm & 7) | REX_B(s);
3630 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3631 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3632 }
3633 break;
3634 case 0x012: /* movlps */
3635 case 0x112: /* movlpd */
3636 if (mod != 3) {
3637 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3638 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3639 } else {
3640 /* movhlps */
3641 rm = (modrm & 7) | REX_B(s);
3642 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3643 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3644 }
3645 break;
3646 case 0x212: /* movsldup */
3647 if (mod != 3) {
3648 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3649 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3650 } else {
3651 rm = (modrm & 7) | REX_B(s);
3652 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3653 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3654 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3655 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3656 }
3657 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3658 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3659 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3660 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3661 break;
3662 case 0x312: /* movddup */
3663 if (mod != 3) {
3664 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3665 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3666 } else {
3667 rm = (modrm & 7) | REX_B(s);
3668 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3669 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3670 }
3671 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3672 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3673 break;
3674 case 0x016: /* movhps */
3675 case 0x116: /* movhpd */
3676 if (mod != 3) {
3677 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3678 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3679 } else {
3680 /* movlhps */
3681 rm = (modrm & 7) | REX_B(s);
3682 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3683 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3684 }
3685 break;
3686 case 0x216: /* movshdup */
3687 if (mod != 3) {
3688 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3689 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3690 } else {
3691 rm = (modrm & 7) | REX_B(s);
3692 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3693 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3694 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3695 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3696 }
3697 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3698 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3699 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3700 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3701 break;
3702 case 0x7e: /* movd ea, mm */
3703#ifdef TARGET_X86_64
3704 if (s->dflag == 2) {
3705 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3706 offsetof(CPUX86State,fpregs[reg].mmx));
3707 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3708 } else
3709#endif
3710 {
3711 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3712 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3713 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3714 }
3715 break;
3716 case 0x17e: /* movd ea, xmm */
3717#ifdef TARGET_X86_64
3718 if (s->dflag == 2) {
3719 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3720 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3721 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3722 } else
3723#endif
3724 {
3725 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3726 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3727 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3728 }
3729 break;
3730 case 0x27e: /* movq xmm, ea */
3731 if (mod != 3) {
3732 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3733 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3734 } else {
3735 rm = (modrm & 7) | REX_B(s);
3736 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3737 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3738 }
3739 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3740 break;
3741 case 0x7f: /* movq ea, mm */
3742 if (mod != 3) {
3743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3744 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3745 } else {
3746 rm = (modrm & 7);
3747 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3748 offsetof(CPUX86State,fpregs[reg].mmx));
3749 }
3750 break;
3751 case 0x011: /* movups */
3752 case 0x111: /* movupd */
3753 case 0x029: /* movaps */
3754 case 0x129: /* movapd */
3755 case 0x17f: /* movdqa ea, xmm */
3756 case 0x27f: /* movdqu ea, xmm */
3757 if (mod != 3) {
3758 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3759 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3760 } else {
3761 rm = (modrm & 7) | REX_B(s);
3762 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3763 offsetof(CPUX86State,xmm_regs[reg]));
3764 }
3765 break;
3766 case 0x211: /* movss ea, xmm */
3767 if (mod != 3) {
3768 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3769 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3770 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3771 } else {
3772 rm = (modrm & 7) | REX_B(s);
3773 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3774 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3775 }
3776 break;
3777 case 0x311: /* movsd ea, xmm */
3778 if (mod != 3) {
3779 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3780 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3781 } else {
3782 rm = (modrm & 7) | REX_B(s);
3783 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3784 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3785 }
3786 break;
3787 case 0x013: /* movlps */
3788 case 0x113: /* movlpd */
3789 if (mod != 3) {
3790 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3791 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3792 } else {
3793 goto illegal_op;
3794 }
3795 break;
3796 case 0x017: /* movhps */
3797 case 0x117: /* movhpd */
3798 if (mod != 3) {
3799 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3800 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3801 } else {
3802 goto illegal_op;
3803 }
3804 break;
3805 case 0x71: /* shift mm, im */
3806 case 0x72:
3807 case 0x73:
3808 case 0x171: /* shift xmm, im */
3809 case 0x172:
3810 case 0x173:
3811 val = ldub_code(s->pc++);
3812 if (is_xmm) {
3813 gen_op_movl_T0_im(val);
3814 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3815 gen_op_movl_T0_0();
3816 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3817 op1_offset = offsetof(CPUX86State,xmm_t0);
3818 } else {
3819 gen_op_movl_T0_im(val);
3820 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3821 gen_op_movl_T0_0();
3822 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3823 op1_offset = offsetof(CPUX86State,mmx_t0);
3824 }
3825 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3826 if (!sse_op2)
3827 goto illegal_op;
3828 if (is_xmm) {
3829 rm = (modrm & 7) | REX_B(s);
3830 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3831 } else {
3832 rm = (modrm & 7);
3833 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3834 }
3835 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3836 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3837 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3838 break;
3839 case 0x050: /* movmskps */
3840 rm = (modrm & 7) | REX_B(s);
3841 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3842 offsetof(CPUX86State,xmm_regs[rm]));
3843 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3844 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3845 gen_op_mov_reg_T0(OT_LONG, reg);
3846 break;
3847 case 0x150: /* movmskpd */
3848 rm = (modrm & 7) | REX_B(s);
3849 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3850 offsetof(CPUX86State,xmm_regs[rm]));
3851 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3852 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3853 gen_op_mov_reg_T0(OT_LONG, reg);
3854 break;
3855 case 0x02a: /* cvtpi2ps */
3856 case 0x12a: /* cvtpi2pd */
3857 tcg_gen_helper_0_0(helper_enter_mmx);
3858 if (mod != 3) {
3859 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3860 op2_offset = offsetof(CPUX86State,mmx_t0);
3861 gen_ldq_env_A0(s->mem_index, op2_offset);
3862 } else {
3863 rm = (modrm & 7);
3864 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3865 }
3866 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3867 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3868 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3869 switch(b >> 8) {
3870 case 0x0:
3871 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3872 break;
3873 default:
3874 case 0x1:
3875 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3876 break;
3877 }
3878 break;
3879 case 0x22a: /* cvtsi2ss */
3880 case 0x32a: /* cvtsi2sd */
3881 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3882 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3883 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3884 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3885 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3886 if (ot == OT_LONG) {
3887 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3888 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3889 } else {
3890 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3891 }
3892 break;
3893 case 0x02c: /* cvttps2pi */
3894 case 0x12c: /* cvttpd2pi */
3895 case 0x02d: /* cvtps2pi */
3896 case 0x12d: /* cvtpd2pi */
3897 tcg_gen_helper_0_0(helper_enter_mmx);
3898 if (mod != 3) {
3899 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3900 op2_offset = offsetof(CPUX86State,xmm_t0);
3901 gen_ldo_env_A0(s->mem_index, op2_offset);
3902 } else {
3903 rm = (modrm & 7) | REX_B(s);
3904 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3905 }
3906 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3907 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3908 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3909 switch(b) {
3910 case 0x02c:
3911 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3912 break;
3913 case 0x12c:
3914 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3915 break;
3916 case 0x02d:
3917 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3918 break;
3919 case 0x12d:
3920 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3921 break;
3922 }
3923 break;
3924 case 0x22c: /* cvttss2si */
3925 case 0x32c: /* cvttsd2si */
3926 case 0x22d: /* cvtss2si */
3927 case 0x32d: /* cvtsd2si */
3928 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3929 if (mod != 3) {
3930 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3931 if ((b >> 8) & 1) {
3932 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3933 } else {
3934 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3935 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3936 }
3937 op2_offset = offsetof(CPUX86State,xmm_t0);
3938 } else {
3939 rm = (modrm & 7) | REX_B(s);
3940 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3941 }
3942 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3943 (b & 1) * 4];
3944 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3945 if (ot == OT_LONG) {
3946 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3947 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3948 } else {
3949 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3950 }
3951 gen_op_mov_reg_T0(ot, reg);
3952 break;
3953 case 0xc4: /* pinsrw */
3954 case 0x1c4:
3955 s->rip_offset = 1;
3956 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3957 val = ldub_code(s->pc++);
3958 if (b1) {
3959 val &= 7;
3960 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3961 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3962 } else {
3963 val &= 3;
3964 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3965 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3966 }
3967 break;
3968 case 0xc5: /* pextrw */
3969 case 0x1c5:
3970 if (mod != 3)
3971 goto illegal_op;
3972 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3973 val = ldub_code(s->pc++);
3974 if (b1) {
3975 val &= 7;
3976 rm = (modrm & 7) | REX_B(s);
3977 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3978 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3979 } else {
3980 val &= 3;
3981 rm = (modrm & 7);
3982 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3983 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3984 }
3985 reg = ((modrm >> 3) & 7) | rex_r;
3986 gen_op_mov_reg_T0(ot, reg);
3987 break;
3988 case 0x1d6: /* movq ea, xmm */
3989 if (mod != 3) {
3990 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3991 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3992 } else {
3993 rm = (modrm & 7) | REX_B(s);
3994 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3995 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3996 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3997 }
3998 break;
3999 case 0x2d6: /* movq2dq */
4000 tcg_gen_helper_0_0(helper_enter_mmx);
4001 rm = (modrm & 7);
4002 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4003 offsetof(CPUX86State,fpregs[rm].mmx));
4004 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4005 break;
4006 case 0x3d6: /* movdq2q */
4007 tcg_gen_helper_0_0(helper_enter_mmx);
4008 rm = (modrm & 7) | REX_B(s);
4009 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4010 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4011 break;
4012 case 0xd7: /* pmovmskb */
4013 case 0x1d7:
4014 if (mod != 3)
4015 goto illegal_op;
4016 if (b1) {
4017 rm = (modrm & 7) | REX_B(s);
4018 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4019 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4020 } else {
4021 rm = (modrm & 7);
4022 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4023 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4024 }
4025 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4026 reg = ((modrm >> 3) & 7) | rex_r;
4027 gen_op_mov_reg_T0(OT_LONG, reg);
4028 break;
4029 case 0x138:
4030 if (s->prefix & PREFIX_REPNZ)
4031 goto crc32;
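        /* fall through */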
4032 case 0x038:
4033 b = modrm;
4034 modrm = ldub_code(s->pc++);
4035 rm = modrm & 7;
4036 reg = ((modrm >> 3) & 7) | rex_r;
4037 mod = (modrm >> 6) & 3;
4038
4039 sse_op2 = sse_op_table6[b].op[b1];
4040 if (!sse_op2)
4041 goto illegal_op;
4042 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4043 goto illegal_op;
4044
4045 if (b1) {
4046 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4047 if (mod == 3) {
4048 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4049 } else {
4050 op2_offset = offsetof(CPUX86State,xmm_t0);
4051 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4052 switch (b) {
4053 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4054 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4055 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4056 gen_ldq_env_A0(s->mem_index, op2_offset +
4057 offsetof(XMMReg, XMM_Q(0)));
4058 break;
4059 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4060 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4061 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4062 (s->mem_index >> 2) - 1);
4063 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4064 offsetof(XMMReg, XMM_L(0)));
4065 break;
4066 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4067 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4068 (s->mem_index >> 2) - 1);
4069 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4070 offsetof(XMMReg, XMM_W(0)));
4071 break;
4072                 case 0x2a: /* movntdqa */
4073 gen_ldo_env_A0(s->mem_index, op1_offset);
4074 return;
4075 default:
4076 gen_ldo_env_A0(s->mem_index, op2_offset);
4077 }
4078 }
4079 } else {
4080 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4081 if (mod == 3) {
4082 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4083 } else {
4084 op2_offset = offsetof(CPUX86State,mmx_t0);
4085 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4086 gen_ldq_env_A0(s->mem_index, op2_offset);
4087 }
4088 }
4089 if (sse_op2 == SSE_SPECIAL)
4090 goto illegal_op;
4091
4092 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4093 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4094 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4095
4096 if (b == 0x17)
4097 s->cc_op = CC_OP_EFLAGS;
4098 break;
4099 case 0x338: /* crc32 */
4100 crc32:
4101 b = modrm;
4102 modrm = ldub_code(s->pc++);
4103 reg = ((modrm >> 3) & 7) | rex_r;
4104
4105 if (b != 0xf0 && b != 0xf1)
4106 goto illegal_op;
4107 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4108 goto illegal_op;
4109
4110 if (b == 0xf0)
4111 ot = OT_BYTE;
4112 else if (b == 0xf1 && s->dflag != 2)
4113 if (s->prefix & PREFIX_DATA)
4114 ot = OT_WORD;
4115 else
4116 ot = OT_LONG;
4117 else
4118 ot = OT_QUAD;
4119
4120 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4121 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4122 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
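        /* the last helper argument, 8 << ot, is the source operand
           width in bits: OT_BYTE -> 8, OT_WORD -> 16, OT_LONG -> 32,
           OT_QUAD -> 64 */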
4123 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4124 cpu_T[0], tcg_const_i32(8 << ot));
4125
4126 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4127 gen_op_mov_reg_T0(ot, reg);
4128 break;
4129 case 0x03a:
4130 case 0x13a:
4131 b = modrm;
4132 modrm = ldub_code(s->pc++);
4133 rm = modrm & 7;
4134 reg = ((modrm >> 3) & 7) | rex_r;
4135 mod = (modrm >> 6) & 3;
4136
4137 sse_op2 = sse_op_table7[b].op[b1];
4138 if (!sse_op2)
4139 goto illegal_op;
4140 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4141 goto illegal_op;
4142
4143 if (sse_op2 == SSE_SPECIAL) {
4144 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4145 rm = (modrm & 7) | REX_B(s);
4146 if (mod != 3)
4147 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4148 reg = ((modrm >> 3) & 7) | rex_r;
4149 val = ldub_code(s->pc++);
4150 switch (b) {
4151 case 0x14: /* pextrb */
4152 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4153 xmm_regs[reg].XMM_B(val & 15)));
4154 if (mod == 3)
4155 gen_op_mov_reg_T0(ot, rm);
4156 else
4157 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4158 (s->mem_index >> 2) - 1);
4159 break;
4160 case 0x15: /* pextrw */
4161 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4162 xmm_regs[reg].XMM_W(val & 7)));
4163 if (mod == 3)
4164 gen_op_mov_reg_T0(ot, rm);
4165 else
4166 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4167 (s->mem_index >> 2) - 1);
4168 break;
4169 case 0x16:
4170 if (ot == OT_LONG) { /* pextrd */
4171 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4172 offsetof(CPUX86State,
4173 xmm_regs[reg].XMM_L(val & 3)));
4174 if (mod == 3)
4175 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4176 else
4177 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4178 (s->mem_index >> 2) - 1);
4179 } else { /* pextrq */
4180 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4181 offsetof(CPUX86State,
4182 xmm_regs[reg].XMM_Q(val & 1)));
4183 if (mod == 3)
4184 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4185 else
4186 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4187 (s->mem_index >> 2) - 1);
4188 }
4189 break;
4190 case 0x17: /* extractps */
4191 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4192 xmm_regs[reg].XMM_L(val & 3)));
4193 if (mod == 3)
4194 gen_op_mov_reg_T0(ot, rm);
4195 else
4196 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4197 (s->mem_index >> 2) - 1);
4198 break;
4199 case 0x20: /* pinsrb */
4200 if (mod == 3)
4201 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4202 else
4203 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4204 (s->mem_index >> 2) - 1);
4205 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4206 xmm_regs[reg].XMM_B(val & 15)));
4207 break;
4208 case 0x21: /* insertps */
4209 if (mod == 3)
4210 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4211 offsetof(CPUX86State,xmm_regs[rm]
4212 .XMM_L((val >> 6) & 3)));
4213 else
4214 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4215 (s->mem_index >> 2) - 1);
4216 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4217 offsetof(CPUX86State,xmm_regs[reg]
4218 .XMM_L((val >> 4) & 3)));
4219 if ((val >> 0) & 1)
4220 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4221 cpu_env, offsetof(CPUX86State,
4222 xmm_regs[reg].XMM_L(0)));
4223 if ((val >> 1) & 1)
4224 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4225 cpu_env, offsetof(CPUX86State,
4226 xmm_regs[reg].XMM_L(1)));
4227 if ((val >> 2) & 1)
4228 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4229 cpu_env, offsetof(CPUX86State,
4230 xmm_regs[reg].XMM_L(2)));
4231 if ((val >> 3) & 1)
4232 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4233 cpu_env, offsetof(CPUX86State,
4234 xmm_regs[reg].XMM_L(3)));
4235 break;
4236 case 0x22:
4237 if (ot == OT_LONG) { /* pinsrd */
4238 if (mod == 3)
4239 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4240 else
4241 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4242 (s->mem_index >> 2) - 1);
4243 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4244 offsetof(CPUX86State,
4245 xmm_regs[reg].XMM_L(val & 3)));
4246 } else { /* pinsrq */
4247 if (mod == 3)
4248 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4249 else
4250 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4251 (s->mem_index >> 2) - 1);
4252 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4253 offsetof(CPUX86State,
4254 xmm_regs[reg].XMM_Q(val & 1)));
4255 }
4256 break;
4257 }
4258 return;
4259 }
4260
4261 if (b1) {
4262 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4263 if (mod == 3) {
4264 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4265 } else {
4266 op2_offset = offsetof(CPUX86State,xmm_t0);
4267 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4268 gen_ldo_env_A0(s->mem_index, op2_offset);
4269 }
4270 } else {
4271 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4272 if (mod == 3) {
4273 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4274 } else {
4275 op2_offset = offsetof(CPUX86State,mmx_t0);
4276 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4277 gen_ldq_env_A0(s->mem_index, op2_offset);
4278 }
4279 }
4280 val = ldub_code(s->pc++);
4281
4282 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4283 s->cc_op = CC_OP_EFLAGS;
4284
4285 if (s->dflag == 2)
4286            /* The helper must use the full 64-bit gp registers */
4287 val |= 1 << 8;
4288 }
4289
4290 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4291 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4292 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4293 break;
4294 default:
4295 goto illegal_op;
4296 }
4297 } else {
4298 /* generic MMX or SSE operation */
4299 switch(b) {
4300 case 0x70: /* pshufx insn */
4301        case 0xc6: /* shufps/shufpd insn */
4302 case 0xc2: /* compare insns */
4303 s->rip_offset = 1;
4304 break;
4305 default:
4306 break;
4307 }
4308 if (is_xmm) {
4309 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4310 if (mod != 3) {
4311 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4312 op2_offset = offsetof(CPUX86State,xmm_t0);
4313 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4314 b == 0xc2)) {
4315 /* specific case for SSE single instructions */
4316 if (b1 == 2) {
4317 /* 32 bit access */
4318 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4319 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4320 } else {
4321 /* 64 bit access */
4322 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4323 }
4324 } else {
4325 gen_ldo_env_A0(s->mem_index, op2_offset);
4326 }
4327 } else {
4328 rm = (modrm & 7) | REX_B(s);
4329 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4330 }
4331 } else {
4332 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4333 if (mod != 3) {
4334 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4335 op2_offset = offsetof(CPUX86State,mmx_t0);
4336 gen_ldq_env_A0(s->mem_index, op2_offset);
4337 } else {
4338 rm = (modrm & 7);
4339 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4340 }
4341 }
4342 switch(b) {
4343 case 0x0f: /* 3DNow! data insns */
4344 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4345 goto illegal_op;
4346 val = ldub_code(s->pc++);
4347 sse_op2 = sse_op_table5[val];
4348 if (!sse_op2)
4349 goto illegal_op;
4350 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4351 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4352 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4353 break;
4354 case 0x70: /* pshufx insn */
4355        case 0xc6: /* shufps/shufpd insn */
4356 val = ldub_code(s->pc++);
4357 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4358 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4359 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4360 break;
4361 case 0xc2:
4362 /* compare insns */
4363 val = ldub_code(s->pc++);
4364 if (val >= 8)
4365 goto illegal_op;
4366 sse_op2 = sse_op_table4[val][b1];
4367 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4368 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4369 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4370 break;
4371 case 0xf7:
4372            /* maskmov: we must prepare A0 */
4373 if (mod != 3)
4374 goto illegal_op;
4375#ifdef TARGET_X86_64
4376 if (s->aflag == 2) {
4377 gen_op_movq_A0_reg(R_EDI);
4378 } else
4379#endif
4380 {
4381 gen_op_movl_A0_reg(R_EDI);
4382 if (s->aflag == 0)
4383 gen_op_andl_A0_ffff();
4384 }
4385 gen_add_A0_ds_seg(s);
4386
4387 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4388 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4389 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4390 break;
4391 default:
4392 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4393 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4394 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4395 break;
4396 }
4397 if (b == 0x2e || b == 0x2f) {
4398 s->cc_op = CC_OP_EFLAGS;
4399 }
4400 }
4401}
4402
4403#ifdef VBOX
4404/* Checks if it's an invalid lock sequence. Only a few instructions
4405 can be used together with the lock prefix, and of those only the
4406 forms that write to a memory operand. So, this is kind of annoying
4407 work to do...
4408 The AMD manual lists the following instructions.
4409 ADC
4410 ADD
4411 AND
4412 BTC
4413 BTR
4414 BTS
4415 CMPXCHG
4416 CMPXCHG8B
4417 CMPXCHG16B
4418 DEC
4419 INC
4420 NEG
4421 NOT
4422 OR
4423 SBB
4424 SUB
4425 XADD
4426 XCHG
4427 XOR */
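/* For example, "lock add [ebx], eax" encodes as F0 01 03 (mod=0,
   memory destination, allowed), while "lock add ebx, eax" encodes
   as F0 01 C3 (mod=3, register destination) and must raise #UD.
   (Illustrative encodings, not part of the AMD list above.) */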
4428static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4429{
4430 target_ulong pc = s->pc;
4431 int modrm, mod, op;
4432
4433 /* X={8,16,32,64} Y={16,32,64} */
4434 switch (b)
4435 {
4436 /* /2: ADC reg/memX, immX */
4437 /* /0: ADD reg/memX, immX */
4438 /* /4: AND reg/memX, immX */
4439 /* /1: OR reg/memX, immX */
4440 /* /3: SBB reg/memX, immX */
4441 /* /5: SUB reg/memX, immX */
4442 /* /6: XOR reg/memX, immX */
4443 case 0x80:
4444 case 0x81:
4445 case 0x83:
4446 modrm = ldub_code(pc++);
4447 op = (modrm >> 3) & 7;
4448 if (op == 7) /* /7: CMP */
4449 break;
4450 mod = (modrm >> 6) & 3;
4451 if (mod == 3) /* register destination */
4452 break;
4453 return false;
4454
4455 case 0x10: /* /r: ADC reg/mem8, reg8 */
4456    case 0x11: /* /r: ADC reg/memY, regY */
4457    case 0x00: /* /r: ADD reg/mem8, reg8 */
4458    case 0x01: /* /r: ADD reg/memY, regY */
4459 case 0x20: /* /r: AND reg/mem8, reg8 */
4460 case 0x21: /* /r: AND reg/memY, regY */
4461 case 0x08: /* /r: OR reg/mem8, reg8 */
4462 case 0x09: /* /r: OR reg/memY, regY */
4463 case 0x18: /* /r: SBB reg/mem8, reg8 */
4464 case 0x19: /* /r: SBB reg/memY, regY */
4465 case 0x28: /* /r: SUB reg/mem8, reg8 */
4466 case 0x29: /* /r: SUB reg/memY, regY */
4467 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4468 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4469 case 0x30: /* /r: XOR reg/mem8, reg8 */
4470 case 0x31: /* /r: XOR reg/memY, regY */
4471 modrm = ldub_code(pc++);
4472 mod = (modrm >> 6) & 3;
4473 if (mod == 3) /* register destination */
4474 break;
4475 return false;
4476
4477 /* /1: DEC reg/memX */
4478 /* /0: INC reg/memX */
4479 case 0xfe:
4480 case 0xff:
4481 modrm = ldub_code(pc++);
4482 mod = (modrm >> 6) & 3;
4483 if (mod == 3) /* register destination */
4484 break;
4485 return false;
4486
4487 /* /3: NEG reg/memX */
4488 /* /2: NOT reg/memX */
4489 case 0xf6:
4490 case 0xf7:
4491 modrm = ldub_code(pc++);
4492 mod = (modrm >> 6) & 3;
4493 if (mod == 3) /* register destination */
4494 break;
4495 return false;
4496
4497 case 0x0f:
4498 b = ldub_code(pc++);
4499 switch (b)
4500 {
4501 /* /7: BTC reg/memY, imm8 */
4502 /* /6: BTR reg/memY, imm8 */
4503 /* /5: BTS reg/memY, imm8 */
4504 case 0xba:
4505 modrm = ldub_code(pc++);
4506 op = (modrm >> 3) & 7;
4507 if (op < 5)
4508 break;
4509 mod = (modrm >> 6) & 3;
4510 if (mod == 3) /* register destination */
4511 break;
4512 return false;
4513
4514 case 0xbb: /* /r: BTC reg/memY, regY */
4515 case 0xb3: /* /r: BTR reg/memY, regY */
4516 case 0xab: /* /r: BTS reg/memY, regY */
4517 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4518 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4519 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4520 case 0xc1: /* /r: XADD reg/memY, regY */
4521 modrm = ldub_code(pc++);
4522 mod = (modrm >> 6) & 3;
4523 if (mod == 3) /* register destination */
4524 break;
4525 return false;
4526
4527 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4528 case 0xc7:
4529 modrm = ldub_code(pc++);
4530 op = (modrm >> 3) & 7;
4531 if (op != 1)
4532 break;
4533 return false;
4534 }
4535 break;
4536 }
4537
4538 /* illegal sequence. The s->pc is past the lock prefix and that
4539 is sufficient for the TB, I think. */
4540 Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
4541 return true;
4542}
4543#endif /* VBOX */
4544
4545
4546/* convert one instruction. s->is_jmp is set if the translation must
4547 be stopped. Return the next pc value */
4548static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4549{
4550 int b, prefixes, aflag, dflag;
4551 int shift, ot;
4552 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4553 target_ulong next_eip, tval;
4554 int rex_w, rex_r;
4555
4556 if (unlikely(loglevel & CPU_LOG_TB_OP))
4557 tcg_gen_debug_insn_start(pc_start);
4558 s->pc = pc_start;
4559 prefixes = 0;
4560 aflag = s->code32;
4561 dflag = s->code32;
4562 s->override = -1;
4563 rex_w = -1;
4564 rex_r = 0;
4565#ifdef TARGET_X86_64
4566 s->rex_x = 0;
4567 s->rex_b = 0;
4568 x86_64_hregs = 0;
4569#endif
4570 s->rip_offset = 0; /* for relative ip address */
4571#ifdef VBOX
4572 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4573 gen_update_eip(pc_start - s->cs_base);
4574#endif
4575 next_byte:
4576 b = ldub_code(s->pc);
4577 s->pc++;
4578 /* check prefixes */
4579#ifdef TARGET_X86_64
4580 if (CODE64(s)) {
4581 switch (b) {
4582 case 0xf3:
4583 prefixes |= PREFIX_REPZ;
4584 goto next_byte;
4585 case 0xf2:
4586 prefixes |= PREFIX_REPNZ;
4587 goto next_byte;
4588 case 0xf0:
4589 prefixes |= PREFIX_LOCK;
4590 goto next_byte;
4591 case 0x2e:
4592 s->override = R_CS;
4593 goto next_byte;
4594 case 0x36:
4595 s->override = R_SS;
4596 goto next_byte;
4597 case 0x3e:
4598 s->override = R_DS;
4599 goto next_byte;
4600 case 0x26:
4601 s->override = R_ES;
4602 goto next_byte;
4603 case 0x64:
4604 s->override = R_FS;
4605 goto next_byte;
4606 case 0x65:
4607 s->override = R_GS;
4608 goto next_byte;
4609 case 0x66:
4610 prefixes |= PREFIX_DATA;
4611 goto next_byte;
4612 case 0x67:
4613 prefixes |= PREFIX_ADR;
4614 goto next_byte;
4615 case 0x40 ... 0x4f:
4616 /* REX prefix */
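            /* worked example: REX byte 0x4D = 0100.1101b has W=1, R=1,
               X=0, B=1, so the extractions below give rex_w=1, rex_r=8,
               rex_x=0 and REX_B(s)=8 */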
4617 rex_w = (b >> 3) & 1;
4618 rex_r = (b & 0x4) << 1;
4619 s->rex_x = (b & 0x2) << 2;
4620 REX_B(s) = (b & 0x1) << 3;
4621 x86_64_hregs = 1; /* select uniform byte register addressing */
4622 goto next_byte;
4623 }
4624 if (rex_w == 1) {
4625 /* 0x66 is ignored if rex.w is set */
4626 dflag = 2;
4627 } else {
4628 if (prefixes & PREFIX_DATA)
4629 dflag ^= 1;
4630 }
4631 if (!(prefixes & PREFIX_ADR))
4632 aflag = 2;
4633 } else
4634#endif
4635 {
4636 switch (b) {
4637 case 0xf3:
4638 prefixes |= PREFIX_REPZ;
4639 goto next_byte;
4640 case 0xf2:
4641 prefixes |= PREFIX_REPNZ;
4642 goto next_byte;
4643 case 0xf0:
4644 prefixes |= PREFIX_LOCK;
4645 goto next_byte;
4646 case 0x2e:
4647 s->override = R_CS;
4648 goto next_byte;
4649 case 0x36:
4650 s->override = R_SS;
4651 goto next_byte;
4652 case 0x3e:
4653 s->override = R_DS;
4654 goto next_byte;
4655 case 0x26:
4656 s->override = R_ES;
4657 goto next_byte;
4658 case 0x64:
4659 s->override = R_FS;
4660 goto next_byte;
4661 case 0x65:
4662 s->override = R_GS;
4663 goto next_byte;
4664 case 0x66:
4665 prefixes |= PREFIX_DATA;
4666 goto next_byte;
4667 case 0x67:
4668 prefixes |= PREFIX_ADR;
4669 goto next_byte;
4670 }
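        /* 0x66/0x67 toggle the default operand/address size: in 32-bit
           code they select the 16-bit size, in 16-bit code the 32-bit
           size */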
4671 if (prefixes & PREFIX_DATA)
4672 dflag ^= 1;
4673 if (prefixes & PREFIX_ADR)
4674 aflag ^= 1;
4675 }
4676
4677 s->prefix = prefixes;
4678 s->aflag = aflag;
4679 s->dflag = dflag;
4680
4681 /* lock generation */
4682#ifndef VBOX
4683 if (prefixes & PREFIX_LOCK)
4684 tcg_gen_helper_0_0(helper_lock);
4685#else /* VBOX */
4686 if (prefixes & PREFIX_LOCK) {
4687 if (is_invalid_lock_sequence(s, pc_start, b)) {
4688 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4689 return s->pc;
4690 }
4691 tcg_gen_helper_0_0(helper_lock);
4692 }
4693#endif /* VBOX */
4694
4695 /* now check op code */
4696 reswitch:
4697 switch(b) {
4698 case 0x0f:
4699 /**************************/
4700 /* extended op code */
4701 b = ldub_code(s->pc++) | 0x100;
4702 goto reswitch;
4703
4704 /**************************/
4705 /* arith & logic */
4706 case 0x00 ... 0x05:
4707 case 0x08 ... 0x0d:
4708 case 0x10 ... 0x15:
4709 case 0x18 ... 0x1d:
4710 case 0x20 ... 0x25:
4711 case 0x28 ... 0x2d:
4712 case 0x30 ... 0x35:
4713 case 0x38 ... 0x3d:
4714 {
4715 int op, f, val;
4716 op = (b >> 3) & 7;
4717 f = (b >> 1) & 3;
4718
4719 if ((b & 1) == 0)
4720 ot = OT_BYTE;
4721 else
4722 ot = dflag + OT_WORD;
4723
4724 switch(f) {
4725 case 0: /* OP Ev, Gv */
4726 modrm = ldub_code(s->pc++);
4727 reg = ((modrm >> 3) & 7) | rex_r;
4728 mod = (modrm >> 6) & 3;
4729 rm = (modrm & 7) | REX_B(s);
4730 if (mod != 3) {
4731 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4732 opreg = OR_TMP0;
4733 } else if (op == OP_XORL && rm == reg) {
4734 xor_zero:
4735 /* xor reg, reg optimisation */
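                /* e.g. "31 C0" (xor %eax,%eax): the result is zero
                   regardless of the old register value, so just store 0
                   and set the logic flags without reading the operands */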
4736 gen_op_movl_T0_0();
4737 s->cc_op = CC_OP_LOGICB + ot;
4738 gen_op_mov_reg_T0(ot, reg);
4739 gen_op_update1_cc();
4740 break;
4741 } else {
4742 opreg = rm;
4743 }
4744 gen_op_mov_TN_reg(ot, 1, reg);
4745 gen_op(s, op, ot, opreg);
4746 break;
4747 case 1: /* OP Gv, Ev */
4748 modrm = ldub_code(s->pc++);
4749 mod = (modrm >> 6) & 3;
4750 reg = ((modrm >> 3) & 7) | rex_r;
4751 rm = (modrm & 7) | REX_B(s);
4752 if (mod != 3) {
4753 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4754 gen_op_ld_T1_A0(ot + s->mem_index);
4755 } else if (op == OP_XORL && rm == reg) {
4756 goto xor_zero;
4757 } else {
4758 gen_op_mov_TN_reg(ot, 1, rm);
4759 }
4760 gen_op(s, op, ot, reg);
4761 break;
4762 case 2: /* OP A, Iv */
4763 val = insn_get(s, ot);
4764 gen_op_movl_T1_im(val);
4765 gen_op(s, op, ot, OR_EAX);
4766 break;
4767 }
4768 }
4769 break;
4770
4771 case 0x82:
4772 if (CODE64(s))
4773 goto illegal_op;
4774 case 0x80: /* GRP1 */
4775 case 0x81:
4776 case 0x83:
4777 {
4778 int val;
4779
4780 if ((b & 1) == 0)
4781 ot = OT_BYTE;
4782 else
4783 ot = dflag + OT_WORD;
4784
4785 modrm = ldub_code(s->pc++);
4786 mod = (modrm >> 6) & 3;
4787 rm = (modrm & 7) | REX_B(s);
4788 op = (modrm >> 3) & 7;
4789
4790 if (mod != 3) {
4791 if (b == 0x83)
4792 s->rip_offset = 1;
4793 else
4794 s->rip_offset = insn_const_size(ot);
4795 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4796 opreg = OR_TMP0;
4797 } else {
4798 opreg = rm;
4799 }
4800
4801 switch(b) {
4802 default:
4803 case 0x80:
4804 case 0x81:
4805 case 0x82:
4806 val = insn_get(s, ot);
4807 break;
4808 case 0x83:
4809 val = (int8_t)insn_get(s, OT_BYTE);
4810 break;
4811 }
4812 gen_op_movl_T1_im(val);
4813 gen_op(s, op, ot, opreg);
4814 }
4815 break;
4816
4817 /**************************/
4818 /* inc, dec, and other misc arith */
4819 case 0x40 ... 0x47: /* inc Gv */
4820 ot = dflag ? OT_LONG : OT_WORD;
4821 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4822 break;
4823 case 0x48 ... 0x4f: /* dec Gv */
4824 ot = dflag ? OT_LONG : OT_WORD;
4825 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4826 break;
4827 case 0xf6: /* GRP3 */
4828 case 0xf7:
4829 if ((b & 1) == 0)
4830 ot = OT_BYTE;
4831 else
4832 ot = dflag + OT_WORD;
4833
4834 modrm = ldub_code(s->pc++);
4835 mod = (modrm >> 6) & 3;
4836 rm = (modrm & 7) | REX_B(s);
4837 op = (modrm >> 3) & 7;
4838 if (mod != 3) {
4839 if (op == 0)
4840 s->rip_offset = insn_const_size(ot);
4841 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4842 gen_op_ld_T0_A0(ot + s->mem_index);
4843 } else {
4844 gen_op_mov_TN_reg(ot, 0, rm);
4845 }
4846
4847 switch(op) {
4848 case 0: /* test */
4849 val = insn_get(s, ot);
4850 gen_op_movl_T1_im(val);
4851 gen_op_testl_T0_T1_cc();
4852 s->cc_op = CC_OP_LOGICB + ot;
4853 break;
4854 case 2: /* not */
4855 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4856 if (mod != 3) {
4857 gen_op_st_T0_A0(ot + s->mem_index);
4858 } else {
4859 gen_op_mov_reg_T0(ot, rm);
4860 }
4861 break;
4862 case 3: /* neg */
4863 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4864 if (mod != 3) {
4865 gen_op_st_T0_A0(ot + s->mem_index);
4866 } else {
4867 gen_op_mov_reg_T0(ot, rm);
4868 }
4869 gen_op_update_neg_cc();
4870 s->cc_op = CC_OP_SUBB + ot;
4871 break;
4872 case 4: /* mul */
4873 switch(ot) {
4874 case OT_BYTE:
4875 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4876 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4877 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4878 /* XXX: use 32 bit mul which could be faster */
4879 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4880 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4881 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4882 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4883 s->cc_op = CC_OP_MULB;
4884 break;
4885 case OT_WORD:
4886 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4887 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4888 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4889 /* XXX: use 32 bit mul which could be faster */
4890 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4891 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4892 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4893 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4894 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4895 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4896 s->cc_op = CC_OP_MULW;
4897 break;
4898 default:
4899 case OT_LONG:
4900#ifdef TARGET_X86_64
4901 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4902 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4903 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4904 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4905 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4906 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4907 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4908 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4909 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4910#else
4911 {
4912 TCGv t0, t1;
4913 t0 = tcg_temp_new(TCG_TYPE_I64);
4914 t1 = tcg_temp_new(TCG_TYPE_I64);
4915 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4916 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4917 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4918 tcg_gen_mul_i64(t0, t0, t1);
4919 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4920 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4921 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4922 tcg_gen_shri_i64(t0, t0, 32);
4923 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4924 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4925 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4926 }
4927#endif
4928 s->cc_op = CC_OP_MULL;
4929 break;
4930#ifdef TARGET_X86_64
4931 case OT_QUAD:
4932 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4933 s->cc_op = CC_OP_MULQ;
4934 break;
4935#endif
4936 }
4937 break;
4938 case 5: /* imul */
4939 switch(ot) {
4940 case OT_BYTE:
4941 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4942 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4943 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4944 /* XXX: use 32 bit mul which could be faster */
4945 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4946 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4947 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4948 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4949 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
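                /* cc_src = result - sext8(result) is non-zero iff the
                   signed product does not fit in 8 bits; CC_OP_MULB
                   derives CF/OF from cc_src != 0 */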
4950 s->cc_op = CC_OP_MULB;
4951 break;
4952 case OT_WORD:
4953 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4954 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4955 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4956 /* XXX: use 32 bit mul which could be faster */
4957 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4958 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4959 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4960 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4961 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4962 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4963 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4964 s->cc_op = CC_OP_MULW;
4965 break;
4966 default:
4967 case OT_LONG:
4968#ifdef TARGET_X86_64
4969 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4970 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4971 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4972 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4973 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4974 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4975 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4976 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4977 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4978 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4979#else
4980 {
4981 TCGv t0, t1;
4982 t0 = tcg_temp_new(TCG_TYPE_I64);
4983 t1 = tcg_temp_new(TCG_TYPE_I64);
4984 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4985 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4986 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4987 tcg_gen_mul_i64(t0, t0, t1);
4988 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4989 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4990 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4991 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4992 tcg_gen_shri_i64(t0, t0, 32);
4993 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4994 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4995 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4996 }
4997#endif
4998 s->cc_op = CC_OP_MULL;
4999 break;
5000#ifdef TARGET_X86_64
5001 case OT_QUAD:
5002 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5003 s->cc_op = CC_OP_MULQ;
5004 break;
5005#endif
5006 }
5007 break;
5008 case 6: /* div */
5009 switch(ot) {
5010 case OT_BYTE:
5011 gen_jmp_im(pc_start - s->cs_base);
5012 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5013 break;
5014 case OT_WORD:
5015 gen_jmp_im(pc_start - s->cs_base);
5016 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5017 break;
5018 default:
5019 case OT_LONG:
5020 gen_jmp_im(pc_start - s->cs_base);
5021 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5022 break;
5023#ifdef TARGET_X86_64
5024 case OT_QUAD:
5025 gen_jmp_im(pc_start - s->cs_base);
5026 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5027 break;
5028#endif
5029 }
5030 break;
5031 case 7: /* idiv */
5032 switch(ot) {
5033 case OT_BYTE:
5034 gen_jmp_im(pc_start - s->cs_base);
5035 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5036 break;
5037 case OT_WORD:
5038 gen_jmp_im(pc_start - s->cs_base);
5039 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5040 break;
5041 default:
5042 case OT_LONG:
5043 gen_jmp_im(pc_start - s->cs_base);
5044 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5045 break;
5046#ifdef TARGET_X86_64
5047 case OT_QUAD:
5048 gen_jmp_im(pc_start - s->cs_base);
5049 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5050 break;
5051#endif
5052 }
5053 break;
5054 default:
5055 goto illegal_op;
5056 }
5057 break;
5058
5059 case 0xfe: /* GRP4 */
5060 case 0xff: /* GRP5 */
5061 if ((b & 1) == 0)
5062 ot = OT_BYTE;
5063 else
5064 ot = dflag + OT_WORD;
5065
5066 modrm = ldub_code(s->pc++);
5067 mod = (modrm >> 6) & 3;
5068 rm = (modrm & 7) | REX_B(s);
5069 op = (modrm >> 3) & 7;
5070 if (op >= 2 && b == 0xfe) {
5071 goto illegal_op;
5072 }
5073 if (CODE64(s)) {
5074 if (op == 2 || op == 4) {
5075 /* operand size for jumps is 64 bit */
5076 ot = OT_QUAD;
5077 } else if (op == 3 || op == 5) {
5078                /* for calls, the operand is 16 or 32 bit, even
5079 in long mode */
5080 ot = dflag ? OT_LONG : OT_WORD;
5081 } else if (op == 6) {
5082 /* default push size is 64 bit */
5083 ot = dflag ? OT_QUAD : OT_WORD;
5084 }
5085 }
5086 if (mod != 3) {
5087 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5088 if (op >= 2 && op != 3 && op != 5)
5089 gen_op_ld_T0_A0(ot + s->mem_index);
5090 } else {
5091 gen_op_mov_TN_reg(ot, 0, rm);
5092 }
5093
5094 switch(op) {
5095 case 0: /* inc Ev */
5096 if (mod != 3)
5097 opreg = OR_TMP0;
5098 else
5099 opreg = rm;
5100 gen_inc(s, ot, opreg, 1);
5101 break;
5102 case 1: /* dec Ev */
5103 if (mod != 3)
5104 opreg = OR_TMP0;
5105 else
5106 opreg = rm;
5107 gen_inc(s, ot, opreg, -1);
5108 break;
5109 case 2: /* call Ev */
5110 /* XXX: optimize if memory (no 'and' is necessary) */
5111#ifdef VBOX_WITH_CALL_RECORD
5112 if (s->record_call)
5113 gen_op_record_call();
5114#endif
5115 if (s->dflag == 0)
5116 gen_op_andl_T0_ffff();
5117 next_eip = s->pc - s->cs_base;
5118 gen_movtl_T1_im(next_eip);
5119 gen_push_T1(s);
5120 gen_op_jmp_T0();
5121 gen_eob(s);
5122 break;
5123 case 3: /* lcall Ev */
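            /* load the far pointer: T1 = 16/32-bit offset, then
               T0 = the 16-bit selector stored right after it */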
5124 gen_op_ld_T1_A0(ot + s->mem_index);
5125 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5126 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5127 do_lcall:
5128 if (s->pe && !s->vm86) {
5129 if (s->cc_op != CC_OP_DYNAMIC)
5130 gen_op_set_cc_op(s->cc_op);
5131 gen_jmp_im(pc_start - s->cs_base);
5132 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5133 tcg_gen_helper_0_4(helper_lcall_protected,
5134 cpu_tmp2_i32, cpu_T[1],
5135 tcg_const_i32(dflag),
5136 tcg_const_i32(s->pc - pc_start));
5137 } else {
5138 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5139 tcg_gen_helper_0_4(helper_lcall_real,
5140 cpu_tmp2_i32, cpu_T[1],
5141 tcg_const_i32(dflag),
5142 tcg_const_i32(s->pc - s->cs_base));
5143 }
5144 gen_eob(s);
5145 break;
5146 case 4: /* jmp Ev */
5147 if (s->dflag == 0)
5148 gen_op_andl_T0_ffff();
5149 gen_op_jmp_T0();
5150 gen_eob(s);
5151 break;
5152 case 5: /* ljmp Ev */
5153 gen_op_ld_T1_A0(ot + s->mem_index);
5154 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5155 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5156 do_ljmp:
5157 if (s->pe && !s->vm86) {
5158 if (s->cc_op != CC_OP_DYNAMIC)
5159 gen_op_set_cc_op(s->cc_op);
5160 gen_jmp_im(pc_start - s->cs_base);
5161 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5162 tcg_gen_helper_0_3(helper_ljmp_protected,
5163 cpu_tmp2_i32,
5164 cpu_T[1],
5165 tcg_const_i32(s->pc - pc_start));
5166 } else {
5167 gen_op_movl_seg_T0_vm(R_CS);
5168 gen_op_movl_T0_T1();
5169 gen_op_jmp_T0();
5170 }
5171 gen_eob(s);
5172 break;
5173 case 6: /* push Ev */
5174 gen_push_T0(s);
5175 break;
5176 default:
5177 goto illegal_op;
5178 }
5179 break;
5180
5181 case 0x84: /* test Ev, Gv */
5182 case 0x85:
5183 if ((b & 1) == 0)
5184 ot = OT_BYTE;
5185 else
5186 ot = dflag + OT_WORD;
5187
5188 modrm = ldub_code(s->pc++);
5189 mod = (modrm >> 6) & 3;
5190 rm = (modrm & 7) | REX_B(s);
5191 reg = ((modrm >> 3) & 7) | rex_r;
5192
5193 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5194 gen_op_mov_TN_reg(ot, 1, reg);
5195 gen_op_testl_T0_T1_cc();
5196 s->cc_op = CC_OP_LOGICB + ot;
5197 break;
5198
5199 case 0xa8: /* test eAX, Iv */
5200 case 0xa9:
5201 if ((b & 1) == 0)
5202 ot = OT_BYTE;
5203 else
5204 ot = dflag + OT_WORD;
5205 val = insn_get(s, ot);
5206
5207 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5208 gen_op_movl_T1_im(val);
5209 gen_op_testl_T0_T1_cc();
5210 s->cc_op = CC_OP_LOGICB + ot;
5211 break;
5212
5213 case 0x98: /* CWDE/CBW */
5214#ifdef TARGET_X86_64
5215 if (dflag == 2) {
5216 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5217 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5218 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5219 } else
5220#endif
5221 if (dflag == 1) {
5222 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5223 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5224 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5225 } else {
5226 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5227 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5228 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5229 }
5230 break;
5231 case 0x99: /* CDQ/CWD */
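        /* sign-extend A into D:A by replicating the sign bit with an
           arithmetic right shift (by 15/31/63 below); e.g. CWD with
           AX=0x8000 leaves DX=0xFFFF */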
5232#ifdef TARGET_X86_64
5233 if (dflag == 2) {
5234 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5235 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5236 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5237 } else
5238#endif
5239 if (dflag == 1) {
5240 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5241 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5242 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5243 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5244 } else {
5245 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5246 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5247 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5248 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5249 }
5250 break;
5251 case 0x1af: /* imul Gv, Ev */
5252 case 0x69: /* imul Gv, Ev, I */
5253 case 0x6b:
5254 ot = dflag + OT_WORD;
5255 modrm = ldub_code(s->pc++);
5256 reg = ((modrm >> 3) & 7) | rex_r;
5257 if (b == 0x69)
5258 s->rip_offset = insn_const_size(ot);
5259 else if (b == 0x6b)
5260 s->rip_offset = 1;
5261 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5262 if (b == 0x69) {
5263 val = insn_get(s, ot);
5264 gen_op_movl_T1_im(val);
5265 } else if (b == 0x6b) {
5266 val = (int8_t)insn_get(s, OT_BYTE);
5267 gen_op_movl_T1_im(val);
5268 } else {
5269 gen_op_mov_TN_reg(ot, 1, reg);
5270 }
5271
5272#ifdef TARGET_X86_64
5273 if (ot == OT_QUAD) {
5274 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5275 } else
5276#endif
5277 if (ot == OT_LONG) {
5278#ifdef TARGET_X86_64
5279 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5280 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5281 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5282 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5283 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5284 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5285#else
5286 {
5287 TCGv t0, t1;
5288 t0 = tcg_temp_new(TCG_TYPE_I64);
5289 t1 = tcg_temp_new(TCG_TYPE_I64);
5290 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5291 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5292 tcg_gen_mul_i64(t0, t0, t1);
5293 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5294 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5295 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5296 tcg_gen_shri_i64(t0, t0, 32);
5297 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5298 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5299 }
5300#endif
5301 } else {
5302 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5303 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5304 /* XXX: use 32 bit mul which could be faster */
5305 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5306 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5307 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5308 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5309 }
5310 gen_op_mov_reg_T0(ot, reg);
5311 s->cc_op = CC_OP_MULB + ot;
5312 break;
5313 case 0x1c0:
5314 case 0x1c1: /* xadd Ev, Gv */
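        /* XADD: the destination receives dest + src while the source
           register receives the old destination value (an exchange
           fused with an add) */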
5315 if ((b & 1) == 0)
5316 ot = OT_BYTE;
5317 else
5318 ot = dflag + OT_WORD;
5319 modrm = ldub_code(s->pc++);
5320 reg = ((modrm >> 3) & 7) | rex_r;
5321 mod = (modrm >> 6) & 3;
5322 if (mod == 3) {
5323 rm = (modrm & 7) | REX_B(s);
5324 gen_op_mov_TN_reg(ot, 0, reg);
5325 gen_op_mov_TN_reg(ot, 1, rm);
5326 gen_op_addl_T0_T1();
5327 gen_op_mov_reg_T1(ot, reg);
5328 gen_op_mov_reg_T0(ot, rm);
5329 } else {
5330 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5331 gen_op_mov_TN_reg(ot, 0, reg);
5332 gen_op_ld_T1_A0(ot + s->mem_index);
5333 gen_op_addl_T0_T1();
5334 gen_op_st_T0_A0(ot + s->mem_index);
5335 gen_op_mov_reg_T1(ot, reg);
5336 }
5337 gen_op_update2_cc();
5338 s->cc_op = CC_OP_ADDB + ot;
5339 break;
5340 case 0x1b0:
5341 case 0x1b1: /* cmpxchg Ev, Gv */
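        /* CMPXCHG: if accumulator == dest then ZF=1 and dest = src,
           else ZF=0 and accumulator = dest; t2 = EAX - dest below
           yields both the branch condition and the SUB flags */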
5342 {
5343 int label1, label2;
5344 TCGv t0, t1, t2, a0;
5345
5346 if ((b & 1) == 0)
5347 ot = OT_BYTE;
5348 else
5349 ot = dflag + OT_WORD;
5350 modrm = ldub_code(s->pc++);
5351 reg = ((modrm >> 3) & 7) | rex_r;
5352 mod = (modrm >> 6) & 3;
5353 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5354 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5355 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5356 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5357 gen_op_mov_v_reg(ot, t1, reg);
5358 if (mod == 3) {
5359 rm = (modrm & 7) | REX_B(s);
5360 gen_op_mov_v_reg(ot, t0, rm);
5361 } else {
5362 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5363 tcg_gen_mov_tl(a0, cpu_A0);
5364 gen_op_ld_v(ot + s->mem_index, t0, a0);
5365 rm = 0; /* avoid warning */
5366 }
5367 label1 = gen_new_label();
5368 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5369 tcg_gen_sub_tl(t2, t2, t0);
5370 gen_extu(ot, t2);
5371 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5372 if (mod == 3) {
5373 label2 = gen_new_label();
5374 gen_op_mov_reg_v(ot, R_EAX, t0);
5375 tcg_gen_br(label2);
5376 gen_set_label(label1);
5377 gen_op_mov_reg_v(ot, rm, t1);
5378 gen_set_label(label2);
5379 } else {
5380 tcg_gen_mov_tl(t1, t0);
5381 gen_op_mov_reg_v(ot, R_EAX, t0);
5382 gen_set_label(label1);
5383 /* always store */
5384 gen_op_st_v(ot + s->mem_index, t1, a0);
5385 }
5386 tcg_gen_mov_tl(cpu_cc_src, t0);
5387 tcg_gen_mov_tl(cpu_cc_dst, t2);
5388 s->cc_op = CC_OP_SUBB + ot;
5389 tcg_temp_free(t0);
5390 tcg_temp_free(t1);
5391 tcg_temp_free(t2);
5392 tcg_temp_free(a0);
5393 }
5394 break;
5395 case 0x1c7: /* cmpxchg8b */
5396 modrm = ldub_code(s->pc++);
5397 mod = (modrm >> 6) & 3;
5398 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5399 goto illegal_op;
5400#ifdef TARGET_X86_64
5401 if (dflag == 2) {
5402 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5403 goto illegal_op;
5404 gen_jmp_im(pc_start - s->cs_base);
5405 if (s->cc_op != CC_OP_DYNAMIC)
5406 gen_op_set_cc_op(s->cc_op);
5407 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5408 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5409 } else
5410#endif
5411 {
5412 if (!(s->cpuid_features & CPUID_CX8))
5413 goto illegal_op;
5414 gen_jmp_im(pc_start - s->cs_base);
5415 if (s->cc_op != CC_OP_DYNAMIC)
5416 gen_op_set_cc_op(s->cc_op);
5417 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5418 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5419 }
5420 s->cc_op = CC_OP_EFLAGS;
5421 break;
5422
5423 /**************************/
5424 /* push/pop */
5425 case 0x50 ... 0x57: /* push */
5426 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5427 gen_push_T0(s);
5428 break;
5429 case 0x58 ... 0x5f: /* pop */
5430 if (CODE64(s)) {
5431 ot = dflag ? OT_QUAD : OT_WORD;
5432 } else {
5433 ot = dflag + OT_WORD;
5434 }
5435 gen_pop_T0(s);
5436 /* NOTE: order is important for pop %sp */
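        /* (ESP must be incremented before the register write so that
           "pop %sp" ends up with the popped value, not SP+2) */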
5437 gen_pop_update(s);
5438 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5439 break;
5440 case 0x60: /* pusha */
5441 if (CODE64(s))
5442 goto illegal_op;
5443 gen_pusha(s);
5444 break;
5445 case 0x61: /* popa */
5446 if (CODE64(s))
5447 goto illegal_op;
5448 gen_popa(s);
5449 break;
5450 case 0x68: /* push Iv */
5451 case 0x6a:
5452 if (CODE64(s)) {
5453 ot = dflag ? OT_QUAD : OT_WORD;
5454 } else {
5455 ot = dflag + OT_WORD;
5456 }
5457 if (b == 0x68)
5458 val = insn_get(s, ot);
5459 else
5460 val = (int8_t)insn_get(s, OT_BYTE);
5461 gen_op_movl_T0_im(val);
5462 gen_push_T0(s);
5463 break;
5464 case 0x8f: /* pop Ev */
5465 if (CODE64(s)) {
5466 ot = dflag ? OT_QUAD : OT_WORD;
5467 } else {
5468 ot = dflag + OT_WORD;
5469 }
5470 modrm = ldub_code(s->pc++);
5471 mod = (modrm >> 6) & 3;
5472 gen_pop_T0(s);
5473 if (mod == 3) {
5474 /* NOTE: order is important for pop %sp */
5475 gen_pop_update(s);
5476 rm = (modrm & 7) | REX_B(s);
5477 gen_op_mov_reg_T0(ot, rm);
5478 } else {
5479 /* NOTE: order is important too for MMU exceptions */
5480 s->popl_esp_hack = 1 << ot;
5481 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5482 s->popl_esp_hack = 0;
5483 gen_pop_update(s);
5484 }
5485 break;
5486 case 0xc8: /* enter */
5487 {
5488 int level;
5489 val = lduw_code(s->pc);
5490 s->pc += 2;
5491 level = ldub_code(s->pc++);
5492 gen_enter(s, val, level);
5493 }
5494 break;
5495 case 0xc9: /* leave */
5496 /* XXX: exception not precise (ESP is updated before potential exception) */
5497 if (CODE64(s)) {
5498 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5499 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5500 } else if (s->ss32) {
5501 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5502 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5503 } else {
5504 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5505 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5506 }
5507 gen_pop_T0(s);
5508 if (CODE64(s)) {
5509 ot = dflag ? OT_QUAD : OT_WORD;
5510 } else {
5511 ot = dflag + OT_WORD;
5512 }
5513 gen_op_mov_reg_T0(ot, R_EBP);
5514 gen_pop_update(s);
5515 break;
5516 case 0x06: /* push es */
5517 case 0x0e: /* push cs */
5518 case 0x16: /* push ss */
5519 case 0x1e: /* push ds */
5520 if (CODE64(s))
5521 goto illegal_op;
5522 gen_op_movl_T0_seg(b >> 3);
5523 gen_push_T0(s);
5524 break;
5525 case 0x1a0: /* push fs */
5526 case 0x1a8: /* push gs */
5527 gen_op_movl_T0_seg((b >> 3) & 7);
5528 gen_push_T0(s);
5529 break;
5530 case 0x07: /* pop es */
5531 case 0x17: /* pop ss */
5532 case 0x1f: /* pop ds */
5533 if (CODE64(s))
5534 goto illegal_op;
5535 reg = b >> 3;
5536 gen_pop_T0(s);
5537 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5538 gen_pop_update(s);
5539 if (reg == R_SS) {
5540 /* if reg == SS, inhibit interrupts/trace. */
5541 /* If several instructions disable interrupts, only the
5542 _first_ does it */
5543 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5544 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5545 s->tf = 0;
5546 }
5547 if (s->is_jmp) {
5548 gen_jmp_im(s->pc - s->cs_base);
5549 gen_eob(s);
5550 }
5551 break;
5552 case 0x1a1: /* pop fs */
5553 case 0x1a9: /* pop gs */
5554 gen_pop_T0(s);
5555 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5556 gen_pop_update(s);
5557 if (s->is_jmp) {
5558 gen_jmp_im(s->pc - s->cs_base);
5559 gen_eob(s);
5560 }
5561 break;
5562
5563 /**************************/
5564 /* mov */
5565 case 0x88:
5566 case 0x89: /* mov Gv, Ev */
5567 if ((b & 1) == 0)
5568 ot = OT_BYTE;
5569 else
5570 ot = dflag + OT_WORD;
5571 modrm = ldub_code(s->pc++);
5572 reg = ((modrm >> 3) & 7) | rex_r;
5573
5574 /* generate a generic store */
5575 gen_ldst_modrm(s, modrm, ot, reg, 1);
5576 break;
5577 case 0xc6:
5578 case 0xc7: /* mov Ev, Iv */
5579 if ((b & 1) == 0)
5580 ot = OT_BYTE;
5581 else
5582 ot = dflag + OT_WORD;
5583 modrm = ldub_code(s->pc++);
5584 mod = (modrm >> 6) & 3;
5585 if (mod != 3) {
5586 s->rip_offset = insn_const_size(ot);
5587 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5588 }
5589 val = insn_get(s, ot);
5590 gen_op_movl_T0_im(val);
5591 if (mod != 3)
5592 gen_op_st_T0_A0(ot + s->mem_index);
5593 else
5594 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5595 break;
5596 case 0x8a:
5597 case 0x8b: /* mov Ev, Gv */
5598#ifdef VBOX /* dtrace hot fix */
5599 if (prefixes & PREFIX_LOCK)
5600 goto illegal_op;
5601#endif
5602 if ((b & 1) == 0)
5603 ot = OT_BYTE;
5604 else
5605 ot = OT_WORD + dflag;
5606 modrm = ldub_code(s->pc++);
5607 reg = ((modrm >> 3) & 7) | rex_r;
5608
5609 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5610 gen_op_mov_reg_T0(ot, reg);
5611 break;
5612 case 0x8e: /* mov seg, Gv */
5613 modrm = ldub_code(s->pc++);
5614 reg = (modrm >> 3) & 7;
5615 if (reg >= 6 || reg == R_CS)
5616 goto illegal_op;
5617 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5618 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5619 if (reg == R_SS) {
5620 /* if reg == SS, inhibit interrupts/trace */
5621 /* If several instructions disable interrupts, only the
5622 _first_ does it */
5623 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5624 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5625 s->tf = 0;
5626 }
5627 if (s->is_jmp) {
5628 gen_jmp_im(s->pc - s->cs_base);
5629 gen_eob(s);
5630 }
5631 break;
5632 case 0x8c: /* mov Gv, seg */
5633 modrm = ldub_code(s->pc++);
5634 reg = (modrm >> 3) & 7;
5635 mod = (modrm >> 6) & 3;
5636 if (reg >= 6)
5637 goto illegal_op;
5638 gen_op_movl_T0_seg(reg);
5639 if (mod == 3)
5640 ot = OT_WORD + dflag;
5641 else
5642 ot = OT_WORD;
5643 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5644 break;
5645
5646 case 0x1b6: /* movzbS Gv, Eb */
5647 case 0x1b7: /* movzwS Gv, Eb */
5648 case 0x1be: /* movsbS Gv, Eb */
5649 case 0x1bf: /* movswS Gv, Eb */
5650 {
5651 int d_ot;
5652            /* d_ot is the size of the destination */
5653 d_ot = dflag + OT_WORD;
5654            /* ot is the size of the source */
5655 ot = (b & 1) + OT_BYTE;
5656 modrm = ldub_code(s->pc++);
5657 reg = ((modrm >> 3) & 7) | rex_r;
5658 mod = (modrm >> 6) & 3;
5659 rm = (modrm & 7) | REX_B(s);
5660
5661 if (mod == 3) {
5662 gen_op_mov_TN_reg(ot, 0, rm);
5663 switch(ot | (b & 8)) {
5664 case OT_BYTE:
5665 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5666 break;
5667 case OT_BYTE | 8:
5668 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5669 break;
5670 case OT_WORD:
5671 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5672 break;
5673 default:
5674 case OT_WORD | 8:
5675 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5676 break;
5677 }
5678 gen_op_mov_reg_T0(d_ot, reg);
5679 } else {
5680 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5681 if (b & 8) {
5682 gen_op_lds_T0_A0(ot + s->mem_index);
5683 } else {
5684 gen_op_ldu_T0_A0(ot + s->mem_index);
5685 }
5686 gen_op_mov_reg_T0(d_ot, reg);
5687 }
5688 }
5689 break;
5690
5691 case 0x8d: /* lea */
5692 ot = dflag + OT_WORD;
5693 modrm = ldub_code(s->pc++);
5694 mod = (modrm >> 6) & 3;
5695 if (mod == 3)
5696 goto illegal_op;
5697 reg = ((modrm >> 3) & 7) | rex_r;
5698 /* we must ensure that no segment is added */
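        /* e.g. "lea eax, [ebx+4]" must yield exactly EBX+4; adding the
           DS base (s->addseg) would corrupt the result, so it is
           cleared around gen_lea_modrm() */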
5699 s->override = -1;
5700 val = s->addseg;
5701 s->addseg = 0;
5702 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5703 s->addseg = val;
5704 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5705 break;
5706
5707 case 0xa0: /* mov EAX, Ov */
5708 case 0xa1:
5709 case 0xa2: /* mov Ov, EAX */
5710 case 0xa3:
5711 {
5712 target_ulong offset_addr;
5713
5714 if ((b & 1) == 0)
5715 ot = OT_BYTE;
5716 else
5717 ot = dflag + OT_WORD;
5718#ifdef TARGET_X86_64
5719 if (s->aflag == 2) {
5720 offset_addr = ldq_code(s->pc);
5721 s->pc += 8;
5722 gen_op_movq_A0_im(offset_addr);
5723 } else
5724#endif
5725 {
5726 if (s->aflag) {
5727 offset_addr = insn_get(s, OT_LONG);
5728 } else {
5729 offset_addr = insn_get(s, OT_WORD);
5730 }
5731 gen_op_movl_A0_im(offset_addr);
5732 }
5733 gen_add_A0_ds_seg(s);
5734 if ((b & 2) == 0) {
5735 gen_op_ld_T0_A0(ot + s->mem_index);
5736 gen_op_mov_reg_T0(ot, R_EAX);
5737 } else {
5738 gen_op_mov_TN_reg(ot, 0, R_EAX);
5739 gen_op_st_T0_A0(ot + s->mem_index);
5740 }
5741 }
5742 break;
5743 case 0xd7: /* xlat */
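        /* XLAT: AL = mem[seg_base + (E/R)BX + zero-extended AL] */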
5744#ifdef TARGET_X86_64
5745 if (s->aflag == 2) {
5746 gen_op_movq_A0_reg(R_EBX);
5747 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5748 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5749 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5750 } else
5751#endif
5752 {
5753 gen_op_movl_A0_reg(R_EBX);
5754 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5755 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5756 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5757 if (s->aflag == 0)
5758 gen_op_andl_A0_ffff();
5759 else
5760 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5761 }
5762 gen_add_A0_ds_seg(s);
5763 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5764 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5765 break;
5766 case 0xb0 ... 0xb7: /* mov R, Ib */
5767 val = insn_get(s, OT_BYTE);
5768 gen_op_movl_T0_im(val);
5769 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5770 break;
5771 case 0xb8 ... 0xbf: /* mov R, Iv */
5772#ifdef TARGET_X86_64
5773 if (dflag == 2) {
5774 uint64_t tmp;
5775 /* 64 bit case */
5776 tmp = ldq_code(s->pc);
5777 s->pc += 8;
5778 reg = (b & 7) | REX_B(s);
5779 gen_movtl_T0_im(tmp);
5780 gen_op_mov_reg_T0(OT_QUAD, reg);
5781 } else
5782#endif
5783 {
5784 ot = dflag ? OT_LONG : OT_WORD;
5785 val = insn_get(s, ot);
5786 reg = (b & 7) | REX_B(s);
5787 gen_op_movl_T0_im(val);
5788 gen_op_mov_reg_T0(ot, reg);
5789 }
5790 break;
5791
5792 case 0x91 ... 0x97: /* xchg R, EAX */
5793 ot = dflag + OT_WORD;
5794 reg = (b & 7) | REX_B(s);
5795 rm = R_EAX;
5796 goto do_xchg_reg;
5797 case 0x86:
5798 case 0x87: /* xchg Ev, Gv */
5799 if ((b & 1) == 0)
5800 ot = OT_BYTE;
5801 else
5802 ot = dflag + OT_WORD;
5803 modrm = ldub_code(s->pc++);
5804 reg = ((modrm >> 3) & 7) | rex_r;
5805 mod = (modrm >> 6) & 3;
5806 if (mod == 3) {
5807 rm = (modrm & 7) | REX_B(s);
5808 do_xchg_reg:
5809 gen_op_mov_TN_reg(ot, 0, reg);
5810 gen_op_mov_TN_reg(ot, 1, rm);
5811 gen_op_mov_reg_T0(ot, rm);
5812 gen_op_mov_reg_T1(ot, reg);
5813 } else {
5814 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5815 gen_op_mov_TN_reg(ot, 0, reg);
5816 /* for xchg, lock is implicit */
5817 if (!(prefixes & PREFIX_LOCK))
5818 tcg_gen_helper_0_0(helper_lock);
5819 gen_op_ld_T1_A0(ot + s->mem_index);
5820 gen_op_st_T0_A0(ot + s->mem_index);
5821 if (!(prefixes & PREFIX_LOCK))
5822 tcg_gen_helper_0_0(helper_unlock);
5823 gen_op_mov_reg_T1(ot, reg);
5824 }
5825 break;
5826 case 0xc4: /* les Gv */
5827 if (CODE64(s))
5828 goto illegal_op;
5829 op = R_ES;
5830 goto do_lxx;
5831 case 0xc5: /* lds Gv */
5832 if (CODE64(s))
5833 goto illegal_op;
5834 op = R_DS;
5835 goto do_lxx;
5836 case 0x1b2: /* lss Gv */
5837 op = R_SS;
5838 goto do_lxx;
5839 case 0x1b4: /* lfs Gv */
5840 op = R_FS;
5841 goto do_lxx;
5842 case 0x1b5: /* lgs Gv */
5843 op = R_GS;
5844 do_lxx:
5845 ot = dflag ? OT_LONG : OT_WORD;
5846 modrm = ldub_code(s->pc++);
5847 reg = ((modrm >> 3) & 7) | rex_r;
5848 mod = (modrm >> 6) & 3;
5849 if (mod == 3)
5850 goto illegal_op;
5851 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5852 gen_op_ld_T1_A0(ot + s->mem_index);
5853 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5854 /* load the segment first to handle exceptions properly */
5855 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5856 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5857 /* then put the data */
5858 gen_op_mov_reg_T1(ot, reg);
5859 if (s->is_jmp) {
5860 gen_jmp_im(s->pc - s->cs_base);
5861 gen_eob(s);
5862 }
5863 break;
5864
5865 /************************/
5866 /* shifts */
5867 case 0xc0:
5868 case 0xc1:
5869 /* shift Ev,Ib */
5870 shift = 2;
5871 grp2:
5872 {
5873 if ((b & 1) == 0)
5874 ot = OT_BYTE;
5875 else
5876 ot = dflag + OT_WORD;
5877
5878 modrm = ldub_code(s->pc++);
5879 mod = (modrm >> 6) & 3;
5880 op = (modrm >> 3) & 7;
5881
5882 if (mod != 3) {
5883 if (shift == 2) {
5884 s->rip_offset = 1;
5885 }
5886 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5887 opreg = OR_TMP0;
5888 } else {
5889 opreg = (modrm & 7) | REX_B(s);
5890 }
5891
5892 /* simpler op */
5893 if (shift == 0) {
5894 gen_shift(s, op, ot, opreg, OR_ECX);
5895 } else {
5896 if (shift == 2) {
5897 shift = ldub_code(s->pc++);
5898 }
5899 gen_shifti(s, op, ot, opreg, shift);
5900 }
5901 }
5902 break;
5903 case 0xd0:
5904 case 0xd1:
5905 /* shift Ev,1 */
5906 shift = 1;
5907 goto grp2;
5908 case 0xd2:
5909 case 0xd3:
5910 /* shift Ev,cl */
5911 shift = 0;
5912 goto grp2;
5913
5914 case 0x1a4: /* shld imm */
5915 op = 0;
5916 shift = 1;
5917 goto do_shiftd;
5918 case 0x1a5: /* shld cl */
5919 op = 0;
5920 shift = 0;
5921 goto do_shiftd;
5922 case 0x1ac: /* shrd imm */
5923 op = 1;
5924 shift = 1;
5925 goto do_shiftd;
5926 case 0x1ad: /* shrd cl */
5927 op = 1;
5928 shift = 0;
5929 do_shiftd:
5930 ot = dflag + OT_WORD;
5931 modrm = ldub_code(s->pc++);
5932 mod = (modrm >> 6) & 3;
5933 rm = (modrm & 7) | REX_B(s);
5934 reg = ((modrm >> 3) & 7) | rex_r;
5935 if (mod != 3) {
5936 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5937 opreg = OR_TMP0;
5938 } else {
5939 opreg = rm;
5940 }
5941 gen_op_mov_TN_reg(ot, 1, reg);
5942
5943 if (shift) {
5944 val = ldub_code(s->pc++);
5945 tcg_gen_movi_tl(cpu_T3, val);
5946 } else {
5947 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5948 }
5949 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5950 break;
5951
5952 /************************/
5953 /* floats */
5954 case 0xd8 ... 0xdf:
5955 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5956 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5957 /* XXX: what to do if illegal op ? */
5958 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5959 break;
5960 }
5961 modrm = ldub_code(s->pc++);
5962 mod = (modrm >> 6) & 3;
5963 rm = modrm & 7;
5964 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
5965 if (mod != 3) {
5966 /* memory op */
5967 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5968 switch(op) {
5969 case 0x00 ... 0x07: /* fxxxs */
5970 case 0x10 ... 0x17: /* fixxxl */
5971 case 0x20 ... 0x27: /* fxxxl */
5972 case 0x30 ... 0x37: /* fixxx */
5973 {
5974 int op1;
5975 op1 = op & 7;
5976
5977 switch(op >> 4) {
5978 case 0:
5979 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5980 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5981 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5982 break;
5983 case 1:
5984 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5985 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5986 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5987 break;
5988 case 2:
5989 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5990 (s->mem_index >> 2) - 1);
5991 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5992 break;
5993 case 3:
5994 default:
5995 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5996 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5997 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5998 break;
5999 }
6000
6001 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6002 if (op1 == 3) {
6003 /* fcomp needs pop */
6004 tcg_gen_helper_0_0(helper_fpop);
6005 }
6006 }
6007 break;
6008 case 0x08: /* flds */
6009 case 0x0a: /* fsts */
6010 case 0x0b: /* fstps */
6011 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6012 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6013 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6014 switch(op & 7) {
6015 case 0:
6016 switch(op >> 4) {
6017 case 0:
6018 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6019 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6020 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6021 break;
6022 case 1:
6023 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6024 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6025 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6026 break;
6027 case 2:
6028 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6029 (s->mem_index >> 2) - 1);
6030 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6031 break;
6032 case 3:
6033 default:
6034 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6035 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6036 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6037 break;
6038 }
6039 break;
6040 case 1:
6041                /* XXX: the corresponding CPUID bit must be tested! */
6042 switch(op >> 4) {
6043 case 1:
6044 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6045 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6046 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6047 break;
6048 case 2:
6049 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6050 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6051 (s->mem_index >> 2) - 1);
6052 break;
6053 case 3:
6054 default:
6055 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6056 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6057 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6058 break;
6059 }
6060 tcg_gen_helper_0_0(helper_fpop);
6061 break;
6062 default:
6063 switch(op >> 4) {
6064 case 0:
6065 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6066 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6067 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6068 break;
6069 case 1:
6070 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6071 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6072 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6073 break;
6074 case 2:
6075 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6076 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6077 (s->mem_index >> 2) - 1);
6078 break;
6079 case 3:
6080 default:
6081 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6082 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6083 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6084 break;
6085 }
6086 if ((op & 7) == 3)
6087 tcg_gen_helper_0_0(helper_fpop);
6088 break;
6089 }
6090 break;
6091 case 0x0c: /* fldenv mem */
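/* helpers like fldenv can raise FPU exceptions, so the condition
   code state and EIP are synced into env first via the
   set_cc_op/jmp_im pair */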
6092 if (s->cc_op != CC_OP_DYNAMIC)
6093 gen_op_set_cc_op(s->cc_op);
6094 gen_jmp_im(pc_start - s->cs_base);
6095 tcg_gen_helper_0_2(helper_fldenv,
6096 cpu_A0, tcg_const_i32(s->dflag));
6097 break;
6098 case 0x0d: /* fldcw mem */
6099 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6100 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6101 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6102 break;
6103 case 0x0e: /* fnstenv mem */
6104 if (s->cc_op != CC_OP_DYNAMIC)
6105 gen_op_set_cc_op(s->cc_op);
6106 gen_jmp_im(pc_start - s->cs_base);
6107 tcg_gen_helper_0_2(helper_fstenv,
6108 cpu_A0, tcg_const_i32(s->dflag));
6109 break;
6110 case 0x0f: /* fnstcw mem */
6111 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6112 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6113 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6114 break;
6115 case 0x1d: /* fldt mem */
6116 if (s->cc_op != CC_OP_DYNAMIC)
6117 gen_op_set_cc_op(s->cc_op);
6118 gen_jmp_im(pc_start - s->cs_base);
6119 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6120 break;
6121 case 0x1f: /* fstpt mem */
6122 if (s->cc_op != CC_OP_DYNAMIC)
6123 gen_op_set_cc_op(s->cc_op);
6124 gen_jmp_im(pc_start - s->cs_base);
6125 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6126 tcg_gen_helper_0_0(helper_fpop);
6127 break;
6128 case 0x2c: /* frstor mem */
6129 if (s->cc_op != CC_OP_DYNAMIC)
6130 gen_op_set_cc_op(s->cc_op);
6131 gen_jmp_im(pc_start - s->cs_base);
6132 tcg_gen_helper_0_2(helper_frstor,
6133 cpu_A0, tcg_const_i32(s->dflag));
6134 break;
6135 case 0x2e: /* fnsave mem */
6136 if (s->cc_op != CC_OP_DYNAMIC)
6137 gen_op_set_cc_op(s->cc_op);
6138 gen_jmp_im(pc_start - s->cs_base);
6139 tcg_gen_helper_0_2(helper_fsave,
6140 cpu_A0, tcg_const_i32(s->dflag));
6141 break;
6142 case 0x2f: /* fnstsw mem */
6143 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6144 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6145 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6146 break;
6147 case 0x3c: /* fbld */
6148 if (s->cc_op != CC_OP_DYNAMIC)
6149 gen_op_set_cc_op(s->cc_op);
6150 gen_jmp_im(pc_start - s->cs_base);
6151 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6152 break;
6153 case 0x3e: /* fbstp */
6154 if (s->cc_op != CC_OP_DYNAMIC)
6155 gen_op_set_cc_op(s->cc_op);
6156 gen_jmp_im(pc_start - s->cs_base);
6157 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6158 tcg_gen_helper_0_0(helper_fpop);
6159 break;
6160 case 0x3d: /* fildll */
6161 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6162 (s->mem_index >> 2) - 1);
6163 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6164 break;
6165 case 0x3f: /* fistpll */
6166 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6167 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6168 (s->mem_index >> 2) - 1);
6169 tcg_gen_helper_0_0(helper_fpop);
6170 break;
6171 default:
6172 goto illegal_op;
6173 }
6174 } else {
6175 /* register float ops */
6176 opreg = rm;
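/* opreg is the ST(i) index taken from the modrm rm field; the
   helpers interpret it relative to the current FPU stack top */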
6177
6178 switch(op) {
6179 case 0x08: /* fld sti */
6180 tcg_gen_helper_0_0(helper_fpush);
6181 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6182 break;
6183 case 0x09: /* fxchg sti */
6184 case 0x29: /* fxchg4 sti, undocumented op */
6185 case 0x39: /* fxchg7 sti, undocumented op */
6186 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6187 break;
6188 case 0x0a: /* grp d9/2 */
6189 switch(rm) {
6190 case 0: /* fnop */
6191 /* check exceptions (FreeBSD FPU probe) */
6192 if (s->cc_op != CC_OP_DYNAMIC)
6193 gen_op_set_cc_op(s->cc_op);
6194 gen_jmp_im(pc_start - s->cs_base);
6195 tcg_gen_helper_0_0(helper_fwait);
6196 break;
6197 default:
6198 goto illegal_op;
6199 }
6200 break;
6201 case 0x0c: /* grp d9/4 */
6202 switch(rm) {
6203 case 0: /* fchs */
6204 tcg_gen_helper_0_0(helper_fchs_ST0);
6205 break;
6206 case 1: /* fabs */
6207 tcg_gen_helper_0_0(helper_fabs_ST0);
6208 break;
6209 case 4: /* ftst */
6210 tcg_gen_helper_0_0(helper_fldz_FT0);
6211 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6212 break;
6213 case 5: /* fxam */
6214 tcg_gen_helper_0_0(helper_fxam_ST0);
6215 break;
6216 default:
6217 goto illegal_op;
6218 }
6219 break;
6220 case 0x0d: /* grp d9/5 */
6221 {
6222 switch(rm) {
6223 case 0:
6224 tcg_gen_helper_0_0(helper_fpush);
6225 tcg_gen_helper_0_0(helper_fld1_ST0);
6226 break;
6227 case 1:
6228 tcg_gen_helper_0_0(helper_fpush);
6229 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6230 break;
6231 case 2:
6232 tcg_gen_helper_0_0(helper_fpush);
6233 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6234 break;
6235 case 3:
6236 tcg_gen_helper_0_0(helper_fpush);
6237 tcg_gen_helper_0_0(helper_fldpi_ST0);
6238 break;
6239 case 4:
6240 tcg_gen_helper_0_0(helper_fpush);
6241 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6242 break;
6243 case 5:
6244 tcg_gen_helper_0_0(helper_fpush);
6245 tcg_gen_helper_0_0(helper_fldln2_ST0);
6246 break;
6247 case 6:
6248 tcg_gen_helper_0_0(helper_fpush);
6249 tcg_gen_helper_0_0(helper_fldz_ST0);
6250 break;
6251 default:
6252 goto illegal_op;
6253 }
6254 }
6255 break;
6256 case 0x0e: /* grp d9/6 */
6257 switch(rm) {
6258 case 0: /* f2xm1 */
6259 tcg_gen_helper_0_0(helper_f2xm1);
6260 break;
6261 case 1: /* fyl2x */
6262 tcg_gen_helper_0_0(helper_fyl2x);
6263 break;
6264 case 2: /* fptan */
6265 tcg_gen_helper_0_0(helper_fptan);
6266 break;
6267 case 3: /* fpatan */
6268 tcg_gen_helper_0_0(helper_fpatan);
6269 break;
6270 case 4: /* fxtract */
6271 tcg_gen_helper_0_0(helper_fxtract);
6272 break;
6273 case 5: /* fprem1 */
6274 tcg_gen_helper_0_0(helper_fprem1);
6275 break;
6276 case 6: /* fdecstp */
6277 tcg_gen_helper_0_0(helper_fdecstp);
6278 break;
6279 default:
6280 case 7: /* fincstp */
6281 tcg_gen_helper_0_0(helper_fincstp);
6282 break;
6283 }
6284 break;
6285 case 0x0f: /* grp d9/7 */
6286 switch(rm) {
6287 case 0: /* fprem */
6288 tcg_gen_helper_0_0(helper_fprem);
6289 break;
6290 case 1: /* fyl2xp1 */
6291 tcg_gen_helper_0_0(helper_fyl2xp1);
6292 break;
6293 case 2: /* fsqrt */
6294 tcg_gen_helper_0_0(helper_fsqrt);
6295 break;
6296 case 3: /* fsincos */
6297 tcg_gen_helper_0_0(helper_fsincos);
6298 break;
6299 case 5: /* fscale */
6300 tcg_gen_helper_0_0(helper_fscale);
6301 break;
6302 case 4: /* frndint */
6303 tcg_gen_helper_0_0(helper_frndint);
6304 break;
6305 case 6: /* fsin */
6306 tcg_gen_helper_0_0(helper_fsin);
6307 break;
6308 default:
6309 case 7: /* fcos */
6310 tcg_gen_helper_0_0(helper_fcos);
6311 break;
6312 }
6313 break;
6314 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6315 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6316 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6317 {
6318 int op1;
6319
6320 op1 = op & 7;
6321 if (op >= 0x20) {
6322 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6323 if (op >= 0x30)
6324 tcg_gen_helper_0_0(helper_fpop);
6325 } else {
6326 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6327 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6328 }
6329 }
6330 break;
6331 case 0x02: /* fcom */
6332 case 0x22: /* fcom2, undocumented op */
6333 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6334 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6335 break;
6336 case 0x03: /* fcomp */
6337 case 0x23: /* fcomp3, undocumented op */
6338 case 0x32: /* fcomp5, undocumented op */
6339 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6340 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6341 tcg_gen_helper_0_0(helper_fpop);
6342 break;
6343 case 0x15: /* da/5 */
6344 switch(rm) {
6345 case 1: /* fucompp */
6346 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6347 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6348 tcg_gen_helper_0_0(helper_fpop);
6349 tcg_gen_helper_0_0(helper_fpop);
6350 break;
6351 default:
6352 goto illegal_op;
6353 }
6354 break;
6355 case 0x1c:
6356 switch(rm) {
6357 case 0: /* feni (287 only, just do nop here) */
6358 break;
6359 case 1: /* fdisi (287 only, just do nop here) */
6360 break;
6361 case 2: /* fclex */
6362 tcg_gen_helper_0_0(helper_fclex);
6363 break;
6364 case 3: /* fninit */
6365 tcg_gen_helper_0_0(helper_fninit);
6366 break;
6367 case 4: /* fsetpm (287 only, just do nop here) */
6368 break;
6369 default:
6370 goto illegal_op;
6371 }
6372 break;
6373 case 0x1d: /* fucomi */
6374 if (s->cc_op != CC_OP_DYNAMIC)
6375 gen_op_set_cc_op(s->cc_op);
6376 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6377 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6378 s->cc_op = CC_OP_EFLAGS;
6379 break;
6380 case 0x1e: /* fcomi */
6381 if (s->cc_op != CC_OP_DYNAMIC)
6382 gen_op_set_cc_op(s->cc_op);
6383 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6384 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6385 s->cc_op = CC_OP_EFLAGS;
6386 break;
6387 case 0x28: /* ffree sti */
6388 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6389 break;
6390 case 0x2a: /* fst sti */
6391 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6392 break;
6393 case 0x2b: /* fstp sti */
6394 case 0x0b: /* fstp1 sti, undocumented op */
6395 case 0x3a: /* fstp8 sti, undocumented op */
6396 case 0x3b: /* fstp9 sti, undocumented op */
6397 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6398 tcg_gen_helper_0_0(helper_fpop);
6399 break;
6400 case 0x2c: /* fucom st(i) */
6401 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6402 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6403 break;
6404 case 0x2d: /* fucomp st(i) */
6405 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6406 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6407 tcg_gen_helper_0_0(helper_fpop);
6408 break;
6409 case 0x33: /* de/3 */
6410 switch(rm) {
6411 case 1: /* fcompp */
6412 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6413 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6414 tcg_gen_helper_0_0(helper_fpop);
6415 tcg_gen_helper_0_0(helper_fpop);
6416 break;
6417 default:
6418 goto illegal_op;
6419 }
6420 break;
6421 case 0x38: /* ffreep sti, undocumented op */
6422 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6423 tcg_gen_helper_0_0(helper_fpop);
6424 break;
6425 case 0x3c: /* df/4 */
6426 switch(rm) {
6427 case 0:
6428 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6429 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6430 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6431 break;
6432 default:
6433 goto illegal_op;
6434 }
6435 break;
6436 case 0x3d: /* fucomip */
6437 if (s->cc_op != CC_OP_DYNAMIC)
6438 gen_op_set_cc_op(s->cc_op);
6439 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6440 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6441 tcg_gen_helper_0_0(helper_fpop);
6442 s->cc_op = CC_OP_EFLAGS;
6443 break;
6444 case 0x3e: /* fcomip */
6445 if (s->cc_op != CC_OP_DYNAMIC)
6446 gen_op_set_cc_op(s->cc_op);
6447 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6448 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6449 tcg_gen_helper_0_0(helper_fpop);
6450 s->cc_op = CC_OP_EFLAGS;
6451 break;
6452 case 0x10 ... 0x13: /* fcmovxx */
6453 case 0x18 ... 0x1b:
6454 {
6455 int op1, l1;
6456 static const uint8_t fcmov_cc[8] = {
6457 (JCC_B << 1),
6458 (JCC_Z << 1),
6459 (JCC_BE << 1),
6460 (JCC_P << 1),
6461 };
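/* the branch below skips the fmov when the condition fails:
   fcmov_cc gives the base condition, and bit 3 of op (the
   0x18..0x1b fcmovn* forms) inverts it via the ^ 1 */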
6462 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6463 l1 = gen_new_label();
6464 gen_jcc1(s, s->cc_op, op1, l1);
6465 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6466 gen_set_label(l1);
6467 }
6468 break;
6469 default:
6470 goto illegal_op;
6471 }
6472 }
6473 break;
6474 /************************/
6475 /* string ops */
6476
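/* common pattern for the string ops: bit 0 of the opcode selects
   byte vs. word/dword size (via dflag), and a rep prefix
   dispatches to the gen_repz_* generators, which emit the
   iteration inline */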
6477 case 0xa4: /* movsS */
6478 case 0xa5:
6479 if ((b & 1) == 0)
6480 ot = OT_BYTE;
6481 else
6482 ot = dflag + OT_WORD;
6483
6484 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6485 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6486 } else {
6487 gen_movs(s, ot);
6488 }
6489 break;
6490
6491 case 0xaa: /* stosS */
6492 case 0xab:
6493 if ((b & 1) == 0)
6494 ot = OT_BYTE;
6495 else
6496 ot = dflag + OT_WORD;
6497
6498 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6499 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6500 } else {
6501 gen_stos(s, ot);
6502 }
6503 break;
6504 case 0xac: /* lodsS */
6505 case 0xad:
6506 if ((b & 1) == 0)
6507 ot = OT_BYTE;
6508 else
6509 ot = dflag + OT_WORD;
6510 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6511 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6512 } else {
6513 gen_lods(s, ot);
6514 }
6515 break;
6516 case 0xae: /* scasS */
6517 case 0xaf:
6518 if ((b & 1) == 0)
6519 ot = OT_BYTE;
6520 else
6521 ot = dflag + OT_WORD;
6522 if (prefixes & PREFIX_REPNZ) {
6523 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6524 } else if (prefixes & PREFIX_REPZ) {
6525 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6526 } else {
6527 gen_scas(s, ot);
6528 s->cc_op = CC_OP_SUBB + ot;
6529 }
6530 break;
6531
6532 case 0xa6: /* cmpsS */
6533 case 0xa7:
6534 if ((b & 1) == 0)
6535 ot = OT_BYTE;
6536 else
6537 ot = dflag + OT_WORD;
6538 if (prefixes & PREFIX_REPNZ) {
6539 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6540 } else if (prefixes & PREFIX_REPZ) {
6541 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6542 } else {
6543 gen_cmps(s, ot);
6544 s->cc_op = CC_OP_SUBB + ot;
6545 }
6546 break;
6547 case 0x6c: /* insS */
6548 case 0x6d:
6549 if ((b & 1) == 0)
6550 ot = OT_BYTE;
6551 else
6552 ot = dflag ? OT_LONG : OT_WORD;
6553 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6554 gen_op_andl_T0_ffff();
6555 gen_check_io(s, ot, pc_start - s->cs_base,
6556 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6557 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6558 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6559 } else {
6560 gen_ins(s, ot);
6561 if (use_icount) {
6562 gen_jmp(s, s->pc - s->cs_base);
6563 }
6564 }
6565 break;
6566 case 0x6e: /* outsS */
6567 case 0x6f:
6568 if ((b & 1) == 0)
6569 ot = OT_BYTE;
6570 else
6571 ot = dflag ? OT_LONG : OT_WORD;
6572 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6573 gen_op_andl_T0_ffff();
6574 gen_check_io(s, ot, pc_start - s->cs_base,
6575 svm_is_rep(prefixes) | 4);
6576 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6577 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6578 } else {
6579 gen_outs(s, ot);
6580 if (use_icount) {
6581 gen_jmp(s, s->pc - s->cs_base);
6582 }
6583 }
6584 break;
6585
6586 /************************/
6587 /* port I/O */
6588
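/* in/out come in immediate-port (e4..e7) and dx-port (ec..ef)
   forms; gen_check_io emits the IOPL/TSS permission bitmap
   check, and under use_icount the access is bracketed with
   gen_io_start/gen_io_end so instruction counting stays exact */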
6589 case 0xe4:
6590 case 0xe5:
6591 if ((b & 1) == 0)
6592 ot = OT_BYTE;
6593 else
6594 ot = dflag ? OT_LONG : OT_WORD;
6595 val = ldub_code(s->pc++);
6596 gen_op_movl_T0_im(val);
6597 gen_check_io(s, ot, pc_start - s->cs_base,
6598 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6599 if (use_icount)
6600 gen_io_start();
6601 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6602 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6603 gen_op_mov_reg_T1(ot, R_EAX);
6604 if (use_icount) {
6605 gen_io_end();
6606 gen_jmp(s, s->pc - s->cs_base);
6607 }
6608 break;
6609 case 0xe6:
6610 case 0xe7:
6611 if ((b & 1) == 0)
6612 ot = OT_BYTE;
6613 else
6614 ot = dflag ? OT_LONG : OT_WORD;
6615 val = ldub_code(s->pc++);
6616 gen_op_movl_T0_im(val);
6617 gen_check_io(s, ot, pc_start - s->cs_base,
6618 svm_is_rep(prefixes));
6619#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6620 if (val == 0x80)
6621 break;
6622#endif /* VBOX */
6623 gen_op_mov_TN_reg(ot, 1, R_EAX);
6624
6625 if (use_icount)
6626 gen_io_start();
6627 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6628 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6629 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6630 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6631 if (use_icount) {
6632 gen_io_end();
6633 gen_jmp(s, s->pc - s->cs_base);
6634 }
6635 break;
6636 case 0xec:
6637 case 0xed:
6638 if ((b & 1) == 0)
6639 ot = OT_BYTE;
6640 else
6641 ot = dflag ? OT_LONG : OT_WORD;
6642 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6643 gen_op_andl_T0_ffff();
6644 gen_check_io(s, ot, pc_start - s->cs_base,
6645 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6646 if (use_icount)
6647 gen_io_start();
6648 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6649 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6650 gen_op_mov_reg_T1(ot, R_EAX);
6651 if (use_icount) {
6652 gen_io_end();
6653 gen_jmp(s, s->pc - s->cs_base);
6654 }
6655 break;
6656 case 0xee:
6657 case 0xef:
6658 if ((b & 1) == 0)
6659 ot = OT_BYTE;
6660 else
6661 ot = dflag ? OT_LONG : OT_WORD;
6662 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6663 gen_op_andl_T0_ffff();
6664 gen_check_io(s, ot, pc_start - s->cs_base,
6665 svm_is_rep(prefixes));
6666 gen_op_mov_TN_reg(ot, 1, R_EAX);
6667
6668 if (use_icount)
6669 gen_io_start();
6670 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6671 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6672 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6673 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6674 if (use_icount) {
6675 gen_io_end();
6676 gen_jmp(s, s->pc - s->cs_base);
6677 }
6678 break;
6679
6680 /************************/
6681 /* control */
6682 case 0xc2: /* ret im */
6683 val = ldsw_code(s->pc);
6684 s->pc += 2;
6685 gen_pop_T0(s);
6686 if (CODE64(s) && s->dflag)
6687 s->dflag = 2;
6688 gen_stack_update(s, val + (2 << s->dflag));
6689 if (s->dflag == 0)
6690 gen_op_andl_T0_ffff();
6691 gen_op_jmp_T0();
6692 gen_eob(s);
6693 break;
6694 case 0xc3: /* ret */
6695 gen_pop_T0(s);
6696 gen_pop_update(s);
6697 if (s->dflag == 0)
6698 gen_op_andl_T0_ffff();
6699 gen_op_jmp_T0();
6700 gen_eob(s);
6701 break;
6702 case 0xca: /* lret im */
6703 val = ldsw_code(s->pc);
6704 s->pc += 2;
6705 do_lret:
6706 if (s->pe && !s->vm86) {
6707 if (s->cc_op != CC_OP_DYNAMIC)
6708 gen_op_set_cc_op(s->cc_op);
6709 gen_jmp_im(pc_start - s->cs_base);
6710 tcg_gen_helper_0_2(helper_lret_protected,
6711 tcg_const_i32(s->dflag),
6712 tcg_const_i32(val));
6713 } else {
6714 gen_stack_A0(s);
6715 /* pop offset */
6716 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6717 if (s->dflag == 0)
6718 gen_op_andl_T0_ffff();
6719 /* NOTE: keeping EIP updated is not a problem in case of
6720 exception */
6721 gen_op_jmp_T0();
6722 /* pop selector */
6723 gen_op_addl_A0_im(2 << s->dflag);
6724 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6725 gen_op_movl_seg_T0_vm(R_CS);
6726 /* add stack offset */
6727 gen_stack_update(s, val + (4 << s->dflag));
6728 }
6729 gen_eob(s);
6730 break;
6731 case 0xcb: /* lret */
6732 val = 0;
6733 goto do_lret;
6734 case 0xcf: /* iret */
6735 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6736 if (!s->pe) {
6737 /* real mode */
6738 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6739 s->cc_op = CC_OP_EFLAGS;
6740 } else if (s->vm86) {
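/* in vm86 mode a 16-bit iret can be virtualized through CR4.VME,
   so the VBox variant below only faults when VME is unavailable
   or a 32-bit operand size is used */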
6741#ifdef VBOX
6742 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6743#else
6744 if (s->iopl != 3) {
6745#endif
6746 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6747 } else {
6748 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6749 s->cc_op = CC_OP_EFLAGS;
6750 }
6751 } else {
6752 if (s->cc_op != CC_OP_DYNAMIC)
6753 gen_op_set_cc_op(s->cc_op);
6754 gen_jmp_im(pc_start - s->cs_base);
6755 tcg_gen_helper_0_2(helper_iret_protected,
6756 tcg_const_i32(s->dflag),
6757 tcg_const_i32(s->pc - s->cs_base));
6758 s->cc_op = CC_OP_EFLAGS;
6759 }
6760 gen_eob(s);
6761 break;
6762 case 0xe8: /* call im */
6763 {
6764 if (dflag)
6765 tval = (int32_t)insn_get(s, OT_LONG);
6766 else
6767 tval = (int16_t)insn_get(s, OT_WORD);
6768 next_eip = s->pc - s->cs_base;
6769 tval += next_eip;
6770 if (s->dflag == 0)
6771 tval &= 0xffff;
6772 gen_movtl_T0_im(next_eip);
6773 gen_push_T0(s);
6774 gen_jmp(s, tval);
6775 }
6776 break;
6777 case 0x9a: /* lcall im */
6778 {
6779 unsigned int selector, offset;
6780
6781 if (CODE64(s))
6782 goto illegal_op;
6783 ot = dflag ? OT_LONG : OT_WORD;
6784 offset = insn_get(s, ot);
6785 selector = insn_get(s, OT_WORD);
6786
6787 gen_op_movl_T0_im(selector);
6788 gen_op_movl_T1_imu(offset);
6789 }
6790 goto do_lcall;
6791 case 0xe9: /* jmp im */
6792 if (dflag)
6793 tval = (int32_t)insn_get(s, OT_LONG);
6794 else
6795 tval = (int16_t)insn_get(s, OT_WORD);
6796 tval += s->pc - s->cs_base;
6797 if (s->dflag == 0)
6798 tval &= 0xffff;
6799 gen_jmp(s, tval);
6800 break;
6801 case 0xea: /* ljmp im */
6802 {
6803 unsigned int selector, offset;
6804
6805 if (CODE64(s))
6806 goto illegal_op;
6807 ot = dflag ? OT_LONG : OT_WORD;
6808 offset = insn_get(s, ot);
6809 selector = insn_get(s, OT_WORD);
6810
6811 gen_op_movl_T0_im(selector);
6812 gen_op_movl_T1_imu(offset);
6813 }
6814 goto do_ljmp;
6815 case 0xeb: /* jmp Jb */
6816 tval = (int8_t)insn_get(s, OT_BYTE);
6817 tval += s->pc - s->cs_base;
6818 if (s->dflag == 0)
6819 tval &= 0xffff;
6820 gen_jmp(s, tval);
6821 break;
6822 case 0x70 ... 0x7f: /* jcc Jb */
6823 tval = (int8_t)insn_get(s, OT_BYTE);
6824 goto do_jcc;
6825 case 0x180 ... 0x18f: /* jcc Jv */
6826 if (dflag) {
6827 tval = (int32_t)insn_get(s, OT_LONG);
6828 } else {
6829 tval = (int16_t)insn_get(s, OT_WORD);
6830 }
6831 do_jcc:
6832 next_eip = s->pc - s->cs_base;
6833 tval += next_eip;
6834 if (s->dflag == 0)
6835 tval &= 0xffff;
6836 gen_jcc(s, b, tval, next_eip);
6837 break;
6838
6839 case 0x190 ... 0x19f: /* setcc Gv */
6840 modrm = ldub_code(s->pc++);
6841 gen_setcc(s, b);
6842 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6843 break;
6844 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6845 {
6846 int l1;
6847 TCGv t0;
6848
6849 ot = dflag + OT_WORD;
6850 modrm = ldub_code(s->pc++);
6851 reg = ((modrm >> 3) & 7) | rex_r;
6852 mod = (modrm >> 6) & 3;
6853 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6854 if (mod != 3) {
6855 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6856 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6857 } else {
6858 rm = (modrm & 7) | REX_B(s);
6859 gen_op_mov_v_reg(ot, t0, rm);
6860 }
6861#ifdef TARGET_X86_64
6862 if (ot == OT_LONG) {
6863 /* XXX: specific Intel behaviour? the high half of the destination is cleared even when the move is not taken */
6864 l1 = gen_new_label();
6865 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6866 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6867 gen_set_label(l1);
6868 tcg_gen_movi_tl(cpu_tmp0, 0);
6869 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6870 } else
6871#endif
6872 {
6873 l1 = gen_new_label();
6874 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6875 gen_op_mov_reg_v(ot, reg, t0);
6876 gen_set_label(l1);
6877 }
6878 tcg_temp_free(t0);
6879 }
6880 break;
6881
6882 /************************/
6883 /* flags */
6884 case 0x9c: /* pushf */
6885 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6886#ifdef VBOX
6887 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6888#else
6889 if (s->vm86 && s->iopl != 3) {
6890#endif
6891 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6892 } else {
6893 if (s->cc_op != CC_OP_DYNAMIC)
6894 gen_op_set_cc_op(s->cc_op);
6895#ifdef VBOX
6896 if (s->vm86 && s->vme && s->iopl != 3)
6897 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6898 else
6899#endif
6900 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6901 gen_push_T0(s);
6902 }
6903 break;
6904 case 0x9d: /* popf */
6905 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6906#ifdef VBOX
6907 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6908#else
6909 if (s->vm86 && s->iopl != 3) {
6910#endif
6911 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6912 } else {
6913 gen_pop_T0(s);
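/* the set of writable eflags depends on privilege: CPL 0 may
   also change IOPL, CPL <= IOPL may still change IF, and
   otherwise only TF/AC/ID/NT are written back */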
6914 if (s->cpl == 0) {
6915 if (s->dflag) {
6916 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6917 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6918 } else {
6919 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6920 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6921 }
6922 } else {
6923 if (s->cpl <= s->iopl) {
6924 if (s->dflag) {
6925 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6926 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6927 } else {
6928 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6929 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6930 }
6931 } else {
6932 if (s->dflag) {
6933 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6934 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6935 } else {
6936#ifdef VBOX
6937 if (s->vm86 && s->vme)
6938 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
6939 else
6940#endif
6941 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6942 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6943 }
6944 }
6945 }
6946 gen_pop_update(s);
6947 s->cc_op = CC_OP_EFLAGS;
6948 /* abort translation because TF flag may change */
6949 gen_jmp_im(s->pc - s->cs_base);
6950 gen_eob(s);
6951 }
6952 break;
6953 case 0x9e: /* sahf */
6954 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6955 goto illegal_op;
6956 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6957 if (s->cc_op != CC_OP_DYNAMIC)
6958 gen_op_set_cc_op(s->cc_op);
6959 gen_compute_eflags(cpu_cc_src);
6960 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6961 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6962 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6963 s->cc_op = CC_OP_EFLAGS;
6964 break;
6965 case 0x9f: /* lahf */
6966 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6967 goto illegal_op;
6968 if (s->cc_op != CC_OP_DYNAMIC)
6969 gen_op_set_cc_op(s->cc_op);
6970 gen_compute_eflags(cpu_T[0]);
6971 /* Note: gen_compute_eflags() only gives the condition codes */
6972 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6973 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6974 break;
6975 case 0xf5: /* cmc */
6976 if (s->cc_op != CC_OP_DYNAMIC)
6977 gen_op_set_cc_op(s->cc_op);
6978 gen_compute_eflags(cpu_cc_src);
6979 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6980 s->cc_op = CC_OP_EFLAGS;
6981 break;
6982 case 0xf8: /* clc */
6983 if (s->cc_op != CC_OP_DYNAMIC)
6984 gen_op_set_cc_op(s->cc_op);
6985 gen_compute_eflags(cpu_cc_src);
6986 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6987 s->cc_op = CC_OP_EFLAGS;
6988 break;
6989 case 0xf9: /* stc */
6990 if (s->cc_op != CC_OP_DYNAMIC)
6991 gen_op_set_cc_op(s->cc_op);
6992 gen_compute_eflags(cpu_cc_src);
6993 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6994 s->cc_op = CC_OP_EFLAGS;
6995 break;
6996 case 0xfc: /* cld */
6997 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6998 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6999 break;
7000 case 0xfd: /* std */
7001 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7002 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7003 break;
7004
7005 /************************/
7006 /* bit operations */
7007 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7008 ot = dflag + OT_WORD;
7009 modrm = ldub_code(s->pc++);
7010 op = (modrm >> 3) & 7;
7011 mod = (modrm >> 6) & 3;
7012 rm = (modrm & 7) | REX_B(s);
7013 if (mod != 3) {
7014 s->rip_offset = 1;
7015 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7016 gen_op_ld_T0_A0(ot + s->mem_index);
7017 } else {
7018 gen_op_mov_TN_reg(ot, 0, rm);
7019 }
7020 /* load shift */
7021 val = ldub_code(s->pc++);
7022 gen_op_movl_T1_im(val);
7023 if (op < 4)
7024 goto illegal_op;
7025 op -= 4;
7026 goto bt_op;
7027 case 0x1a3: /* bt Gv, Ev */
7028 op = 0;
7029 goto do_btx;
7030 case 0x1ab: /* bts */
7031 op = 1;
7032 goto do_btx;
7033 case 0x1b3: /* btr */
7034 op = 2;
7035 goto do_btx;
7036 case 0x1bb: /* btc */
7037 op = 3;
7038 do_btx:
7039 ot = dflag + OT_WORD;
7040 modrm = ldub_code(s->pc++);
7041 reg = ((modrm >> 3) & 7) | rex_r;
7042 mod = (modrm >> 6) & 3;
7043 rm = (modrm & 7) | REX_B(s);
7044 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7045 if (mod != 3) {
7046 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7047 /* specific case: the bit index may lie outside the operand, so its upper part is folded into the effective address as a displacement */
7048 gen_exts(ot, cpu_T[1]);
7049 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7050 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7051 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7052 gen_op_ld_T0_A0(ot + s->mem_index);
7053 } else {
7054 gen_op_mov_TN_reg(ot, 0, rm);
7055 }
7056 bt_op:
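/* T1 now holds the bit index modulo the operand width; the
   selected bit is shifted into bit 0 of cc_src (the carry),
   and bts/btr/btc set/clear/toggle it via or/and-not/xor */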
7057 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7058 switch(op) {
7059 case 0:
7060 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7061 tcg_gen_movi_tl(cpu_cc_dst, 0);
7062 break;
7063 case 1:
7064 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7065 tcg_gen_movi_tl(cpu_tmp0, 1);
7066 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7067 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7068 break;
7069 case 2:
7070 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7071 tcg_gen_movi_tl(cpu_tmp0, 1);
7072 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7073 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7074 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7075 break;
7076 default:
7077 case 3:
7078 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7079 tcg_gen_movi_tl(cpu_tmp0, 1);
7080 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7081 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7082 break;
7083 }
7084 s->cc_op = CC_OP_SARB + ot;
7085 if (op != 0) {
7086 if (mod != 3)
7087 gen_op_st_T0_A0(ot + s->mem_index);
7088 else
7089 gen_op_mov_reg_T0(ot, rm);
7090 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7091 tcg_gen_movi_tl(cpu_cc_dst, 0);
7092 }
7093 break;
7094 case 0x1bc: /* bsf */
7095 case 0x1bd: /* bsr */
7096 {
7097 int label1;
7098 TCGv t0;
7099
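/* if the source is zero, the destination is left unchanged and
   cc_dst stays 0, which reads as ZF set under CC_OP_LOGIC;
   otherwise the helper result is stored and cc_dst is set to 1 */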
7100 ot = dflag + OT_WORD;
7101 modrm = ldub_code(s->pc++);
7102 reg = ((modrm >> 3) & 7) | rex_r;
7103 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7104 gen_extu(ot, cpu_T[0]);
7105 label1 = gen_new_label();
7106 tcg_gen_movi_tl(cpu_cc_dst, 0);
7107 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7108 tcg_gen_mov_tl(t0, cpu_T[0]);
7109 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7110 if (b & 1) {
7111 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7112 } else {
7113 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7114 }
7115 gen_op_mov_reg_T0(ot, reg);
7116 tcg_gen_movi_tl(cpu_cc_dst, 1);
7117 gen_set_label(label1);
7118 tcg_gen_discard_tl(cpu_cc_src);
7119 s->cc_op = CC_OP_LOGICB + ot;
7120 tcg_temp_free(t0);
7121 }
7122 break;
7123 /************************/
7124 /* bcd */
7125 case 0x27: /* daa */
7126 if (CODE64(s))
7127 goto illegal_op;
7128 if (s->cc_op != CC_OP_DYNAMIC)
7129 gen_op_set_cc_op(s->cc_op);
7130 tcg_gen_helper_0_0(helper_daa);
7131 s->cc_op = CC_OP_EFLAGS;
7132 break;
7133 case 0x2f: /* das */
7134 if (CODE64(s))
7135 goto illegal_op;
7136 if (s->cc_op != CC_OP_DYNAMIC)
7137 gen_op_set_cc_op(s->cc_op);
7138 tcg_gen_helper_0_0(helper_das);
7139 s->cc_op = CC_OP_EFLAGS;
7140 break;
7141 case 0x37: /* aaa */
7142 if (CODE64(s))
7143 goto illegal_op;
7144 if (s->cc_op != CC_OP_DYNAMIC)
7145 gen_op_set_cc_op(s->cc_op);
7146 tcg_gen_helper_0_0(helper_aaa);
7147 s->cc_op = CC_OP_EFLAGS;
7148 break;
7149 case 0x3f: /* aas */
7150 if (CODE64(s))
7151 goto illegal_op;
7152 if (s->cc_op != CC_OP_DYNAMIC)
7153 gen_op_set_cc_op(s->cc_op);
7154 tcg_gen_helper_0_0(helper_aas);
7155 s->cc_op = CC_OP_EFLAGS;
7156 break;
7157 case 0xd4: /* aam */
7158 if (CODE64(s))
7159 goto illegal_op;
7160 val = ldub_code(s->pc++);
7161 if (val == 0) {
7162 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7163 } else {
7164 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7165 s->cc_op = CC_OP_LOGICB;
7166 }
7167 break;
7168 case 0xd5: /* aad */
7169 if (CODE64(s))
7170 goto illegal_op;
7171 val = ldub_code(s->pc++);
7172 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7173 s->cc_op = CC_OP_LOGICB;
7174 break;
7175 /************************/
7176 /* misc */
7177 case 0x90: /* nop */
7178 /* XXX: xchg + rex handling */
7179 /* XXX: correct lock test for all insn */
7180 if (prefixes & PREFIX_LOCK)
7181 goto illegal_op;
7182 if (prefixes & PREFIX_REPZ) {
7183 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7184 }
7185 break;
7186 case 0x9b: /* fwait */
7187 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7188 (HF_MP_MASK | HF_TS_MASK)) {
7189 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7190 } else {
7191 if (s->cc_op != CC_OP_DYNAMIC)
7192 gen_op_set_cc_op(s->cc_op);
7193 gen_jmp_im(pc_start - s->cs_base);
7194 tcg_gen_helper_0_0(helper_fwait);
7195 }
7196 break;
7197 case 0xcc: /* int3 */
7198#ifdef VBOX
7199 if (s->vm86 && s->iopl != 3 && !s->vme) {
7200 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7201 } else
7202#endif
7203 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7204 break;
7205 case 0xcd: /* int N */
7206 val = ldub_code(s->pc++);
7207#ifdef VBOX
7208 if (s->vm86 && s->iopl != 3 && !s->vme) {
7209#else
7210 if (s->vm86 && s->iopl != 3) {
7211#endif
7212 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7213 } else {
7214 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7215 }
7216 break;
7217 case 0xce: /* into */
7218 if (CODE64(s))
7219 goto illegal_op;
7220 if (s->cc_op != CC_OP_DYNAMIC)
7221 gen_op_set_cc_op(s->cc_op);
7222 gen_jmp_im(pc_start - s->cs_base);
7223 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7224 break;
7225 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7226 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7227#if 1
7228 gen_debug(s, pc_start - s->cs_base);
7229#else
7230 /* start debug */
7231 tb_flush(cpu_single_env);
7232 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7233#endif
7234 break;
7235 case 0xfa: /* cli */
7236 if (!s->vm86) {
7237 if (s->cpl <= s->iopl) {
7238 tcg_gen_helper_0_0(helper_cli);
7239 } else {
7240 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7241 }
7242 } else {
7243 if (s->iopl == 3) {
7244 tcg_gen_helper_0_0(helper_cli);
7245#ifdef VBOX
7246 } else if (s->iopl != 3 && s->vme) {
7247 tcg_gen_helper_0_0(helper_cli_vme);
7248#endif
7249 } else {
7250 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7251 }
7252 }
7253 break;
7254 case 0xfb: /* sti */
7255 if (!s->vm86) {
7256 if (s->cpl <= s->iopl) {
7257 gen_sti:
7258 tcg_gen_helper_0_0(helper_sti);
7259 /* interrupts are enabled only after the insn following sti */
7260 /* if several consecutive insns inhibit interrupts, only the
7261 _first_ one sets the inhibit flag */
7262 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7263 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7264 /* give a chance to handle pending irqs */
7265 gen_jmp_im(s->pc - s->cs_base);
7266 gen_eob(s);
7267 } else {
7268 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7269 }
7270 } else {
7271 if (s->iopl == 3) {
7272 goto gen_sti;
7273#ifdef VBOX
7274 } else if (s->iopl != 3 && s->vme) {
7275 tcg_gen_helper_0_0(helper_sti_vme);
7276 /* give a chance to handle pending irqs */
7277 gen_jmp_im(s->pc - s->cs_base);
7278 gen_eob(s);
7279#endif
7280 } else {
7281 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7282 }
7283 }
7284 break;
7285 case 0x62: /* bound */
7286 if (CODE64(s))
7287 goto illegal_op;
7288 ot = dflag ? OT_LONG : OT_WORD;
7289 modrm = ldub_code(s->pc++);
7290 reg = (modrm >> 3) & 7;
7291 mod = (modrm >> 6) & 3;
7292 if (mod == 3)
7293 goto illegal_op;
7294 gen_op_mov_TN_reg(ot, 0, reg);
7295 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7296 gen_jmp_im(pc_start - s->cs_base);
7297 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7298 if (ot == OT_WORD)
7299 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7300 else
7301 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7302 break;
7303 case 0x1c8 ... 0x1cf: /* bswap reg */
7304 reg = (b & 7) | REX_B(s);
7305#ifdef TARGET_X86_64
7306 if (dflag == 2) {
7307 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7308 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7309 gen_op_mov_reg_T0(OT_QUAD, reg);
7310 } else
7311 {
7312 TCGv tmp0;
7313 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7314
7315 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7316 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7317 tcg_gen_bswap_i32(tmp0, tmp0);
7318 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7319 gen_op_mov_reg_T0(OT_LONG, reg);
7320 }
7321#else
7322 {
7323 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7324 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7325 gen_op_mov_reg_T0(OT_LONG, reg);
7326 }
7327#endif
7328 break;
7329 case 0xd6: /* salc */
7330 if (CODE64(s))
7331 goto illegal_op;
7332 if (s->cc_op != CC_OP_DYNAMIC)
7333 gen_op_set_cc_op(s->cc_op);
7334 gen_compute_eflags_c(cpu_T[0]);
7335 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7336 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7337 break;
7338 case 0xe0: /* loopnz */
7339 case 0xe1: /* loopz */
7340 case 0xe2: /* loop */
7341 case 0xe3: /* jecxz */
7342 {
7343 int l1, l2, l3;
7344
7345 tval = (int8_t)insn_get(s, OT_BYTE);
7346 next_eip = s->pc - s->cs_base;
7347 tval += next_eip;
7348 if (s->dflag == 0)
7349 tval &= 0xffff;
7350
7351 l1 = gen_new_label();
7352 l2 = gen_new_label();
7353 l3 = gen_new_label();
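/* l1: branch taken, l3: condition failed (fall through to
   next_eip), l2: common exit */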
7354 b &= 3;
7355 switch(b) {
7356 case 0: /* loopnz */
7357 case 1: /* loopz */
7358 if (s->cc_op != CC_OP_DYNAMIC)
7359 gen_op_set_cc_op(s->cc_op);
7360 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7361 gen_op_jz_ecx(s->aflag, l3);
7362 gen_compute_eflags(cpu_tmp0);
7363 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7364 if (b == 0) {
7365 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7366 } else {
7367 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7368 }
7369 break;
7370 case 2: /* loop */
7371 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7372 gen_op_jnz_ecx(s->aflag, l1);
7373 break;
7374 default:
7375 case 3: /* jcxz */
7376 gen_op_jz_ecx(s->aflag, l1);
7377 break;
7378 }
7379
7380 gen_set_label(l3);
7381 gen_jmp_im(next_eip);
7382 tcg_gen_br(l2);
7383
7384 gen_set_label(l1);
7385 gen_jmp_im(tval);
7386 gen_set_label(l2);
7387 gen_eob(s);
7388 }
7389 break;
7390 case 0x130: /* wrmsr */
7391 case 0x132: /* rdmsr */
7392 if (s->cpl != 0) {
7393 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7394 } else {
7395 if (s->cc_op != CC_OP_DYNAMIC)
7396 gen_op_set_cc_op(s->cc_op);
7397 gen_jmp_im(pc_start - s->cs_base);
7398 if (b & 2) {
7399 tcg_gen_helper_0_0(helper_rdmsr);
7400 } else {
7401 tcg_gen_helper_0_0(helper_wrmsr);
7402 }
7403 }
7404 break;
7405 case 0x131: /* rdtsc */
7406 if (s->cc_op != CC_OP_DYNAMIC)
7407 gen_op_set_cc_op(s->cc_op);
7408 gen_jmp_im(pc_start - s->cs_base);
7409 if (use_icount)
7410 gen_io_start();
7411 tcg_gen_helper_0_0(helper_rdtsc);
7412 if (use_icount) {
7413 gen_io_end();
7414 gen_jmp(s, s->pc - s->cs_base);
7415 }
7416 break;
7417 case 0x133: /* rdpmc */
7418 if (s->cc_op != CC_OP_DYNAMIC)
7419 gen_op_set_cc_op(s->cc_op);
7420 gen_jmp_im(pc_start - s->cs_base);
7421 tcg_gen_helper_0_0(helper_rdpmc);
7422 break;
7423 case 0x134: /* sysenter */
7424#ifndef VBOX
7425 /* For Intel CPUs, SYSENTER is valid in 64-bit mode */
7426 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7427#else
7428 /** @todo: make things right */
7429 if (CODE64(s))
7430#endif
7431 goto illegal_op;
7432 if (!s->pe) {
7433 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7434 } else {
7435 if (s->cc_op != CC_OP_DYNAMIC) {
7436 gen_op_set_cc_op(s->cc_op);
7437 s->cc_op = CC_OP_DYNAMIC;
7438 }
7439 gen_jmp_im(pc_start - s->cs_base);
7440 tcg_gen_helper_0_0(helper_sysenter);
7441 gen_eob(s);
7442 }
7443 break;
7444 case 0x135: /* sysexit */
7445#ifndef VBOX
7446 /* For Intel CPUs, SYSEXIT is valid in 64-bit mode */
7447 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7448#else
7449 /** @todo: make things right */
7450 if (CODE64(s))
7451#endif
7452 goto illegal_op;
7453 if (!s->pe) {
7454 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7455 } else {
7456 if (s->cc_op != CC_OP_DYNAMIC) {
7457 gen_op_set_cc_op(s->cc_op);
7458 s->cc_op = CC_OP_DYNAMIC;
7459 }
7460 gen_jmp_im(pc_start - s->cs_base);
7461 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7462 gen_eob(s);
7463 }
7464 break;
7465#ifdef TARGET_X86_64
7466 case 0x105: /* syscall */
7467 /* XXX: is it usable in real mode? */
7468 if (s->cc_op != CC_OP_DYNAMIC) {
7469 gen_op_set_cc_op(s->cc_op);
7470 s->cc_op = CC_OP_DYNAMIC;
7471 }
7472 gen_jmp_im(pc_start - s->cs_base);
7473 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7474 gen_eob(s);
7475 break;
7476 case 0x107: /* sysret */
7477 if (!s->pe) {
7478 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7479 } else {
7480 if (s->cc_op != CC_OP_DYNAMIC) {
7481 gen_op_set_cc_op(s->cc_op);
7482 s->cc_op = CC_OP_DYNAMIC;
7483 }
7484 gen_jmp_im(pc_start - s->cs_base);
7485 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7486 /* condition codes are modified only in long mode */
7487 if (s->lma)
7488 s->cc_op = CC_OP_EFLAGS;
7489 gen_eob(s);
7490 }
7491 break;
7492#endif
7493 case 0x1a2: /* cpuid */
7494 if (s->cc_op != CC_OP_DYNAMIC)
7495 gen_op_set_cc_op(s->cc_op);
7496 gen_jmp_im(pc_start - s->cs_base);
7497 tcg_gen_helper_0_0(helper_cpuid);
7498 break;
7499 case 0xf4: /* hlt */
7500 if (s->cpl != 0) {
7501 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7502 } else {
7503 if (s->cc_op != CC_OP_DYNAMIC)
7504 gen_op_set_cc_op(s->cc_op);
7505 gen_jmp_im(pc_start - s->cs_base);
7506 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7507 s->is_jmp = 3;
7508 }
7509 break;
7510 case 0x100:
7511 modrm = ldub_code(s->pc++);
7512 mod = (modrm >> 6) & 3;
7513 op = (modrm >> 3) & 7;
7514 switch(op) {
7515 case 0: /* sldt */
7516 if (!s->pe || s->vm86)
7517 goto illegal_op;
7518 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7519 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7520 ot = OT_WORD;
7521 if (mod == 3)
7522 ot += s->dflag;
7523 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7524 break;
7525 case 2: /* lldt */
7526 if (!s->pe || s->vm86)
7527 goto illegal_op;
7528 if (s->cpl != 0) {
7529 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7530 } else {
7531 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7532 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7533 gen_jmp_im(pc_start - s->cs_base);
7534 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7535 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7536 }
7537 break;
7538 case 1: /* str */
7539 if (!s->pe || s->vm86)
7540 goto illegal_op;
7541 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7542 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7543 ot = OT_WORD;
7544 if (mod == 3)
7545 ot += s->dflag;
7546 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7547 break;
7548 case 3: /* ltr */
7549 if (!s->pe || s->vm86)
7550 goto illegal_op;
7551 if (s->cpl != 0) {
7552 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7553 } else {
7554 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7555 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7556 gen_jmp_im(pc_start - s->cs_base);
7557 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7558 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7559 }
7560 break;
7561 case 4: /* verr */
7562 case 5: /* verw */
7563 if (!s->pe || s->vm86)
7564 goto illegal_op;
7565 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7566 if (s->cc_op != CC_OP_DYNAMIC)
7567 gen_op_set_cc_op(s->cc_op);
7568 if (op == 4)
7569 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7570 else
7571 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7572 s->cc_op = CC_OP_EFLAGS;
7573 break;
7574 default:
7575 goto illegal_op;
7576 }
7577 break;
7578 case 0x101:
7579 modrm = ldub_code(s->pc++);
7580 mod = (modrm >> 6) & 3;
7581 op = (modrm >> 3) & 7;
7582 rm = modrm & 7;
7583 switch(op) {
7584 case 0: /* sgdt */
7585 if (mod == 3)
7586 goto illegal_op;
7587 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7588 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7589 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7590 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7591 gen_add_A0_im(s, 2);
7592 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7593 if (!s->dflag)
7594 gen_op_andl_T0_im(0xffffff);
7595 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7596 break;
7597 case 1:
7598 if (mod == 3) {
7599 switch (rm) {
7600 case 0: /* monitor */
7601 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7602 s->cpl != 0)
7603 goto illegal_op;
7604 if (s->cc_op != CC_OP_DYNAMIC)
7605 gen_op_set_cc_op(s->cc_op);
7606 gen_jmp_im(pc_start - s->cs_base);
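/* only the segment-adjusted address from rAX is passed to the
   helper; the ECX/EDX monitor hints are not inspected here */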
7607#ifdef TARGET_X86_64
7608 if (s->aflag == 2) {
7609 gen_op_movq_A0_reg(R_EAX);
7610 } else
7611#endif
7612 {
7613 gen_op_movl_A0_reg(R_EAX);
7614 if (s->aflag == 0)
7615 gen_op_andl_A0_ffff();
7616 }
7617 gen_add_A0_ds_seg(s);
7618 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7619 break;
7620 case 1: /* mwait */
7621 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7622 s->cpl != 0)
7623 goto illegal_op;
7624 if (s->cc_op != CC_OP_DYNAMIC) {
7625 gen_op_set_cc_op(s->cc_op);
7626 s->cc_op = CC_OP_DYNAMIC;
7627 }
7628 gen_jmp_im(pc_start - s->cs_base);
7629 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7630 gen_eob(s);
7631 break;
7632 default:
7633 goto illegal_op;
7634 }
7635 } else { /* sidt */
7636 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7637 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7638 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7639 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7640 gen_add_A0_im(s, 2);
7641 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7642 if (!s->dflag)
7643 gen_op_andl_T0_im(0xffffff);
7644 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7645 }
7646 break;
7647 case 2: /* lgdt */
7648 case 3: /* lidt */
7649 if (mod == 3) {
7650 if (s->cc_op != CC_OP_DYNAMIC)
7651 gen_op_set_cc_op(s->cc_op);
7652 gen_jmp_im(pc_start - s->cs_base);
7653 switch(rm) {
7654 case 0: /* VMRUN */
7655 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7656 goto illegal_op;
7657 if (s->cpl != 0) {
7658 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7659 break;
7660 } else {
7661 tcg_gen_helper_0_2(helper_vmrun,
7662 tcg_const_i32(s->aflag),
7663 tcg_const_i32(s->pc - pc_start));
7664 tcg_gen_exit_tb(0);
7665 s->is_jmp = 3;
7666 }
7667 break;
7668 case 1: /* VMMCALL */
7669 if (!(s->flags & HF_SVME_MASK))
7670 goto illegal_op;
7671 tcg_gen_helper_0_0(helper_vmmcall);
7672 break;
7673 case 2: /* VMLOAD */
7674 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7675 goto illegal_op;
7676 if (s->cpl != 0) {
7677 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7678 break;
7679 } else {
7680 tcg_gen_helper_0_1(helper_vmload,
7681 tcg_const_i32(s->aflag));
7682 }
7683 break;
7684 case 3: /* VMSAVE */
7685 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7686 goto illegal_op;
7687 if (s->cpl != 0) {
7688 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7689 break;
7690 } else {
7691 tcg_gen_helper_0_1(helper_vmsave,
7692 tcg_const_i32(s->aflag));
7693 }
7694 break;
7695 case 4: /* STGI */
7696 if ((!(s->flags & HF_SVME_MASK) &&
7697 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7698 !s->pe)
7699 goto illegal_op;
7700 if (s->cpl != 0) {
7701 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7702 break;
7703 } else {
7704 tcg_gen_helper_0_0(helper_stgi);
7705 }
7706 break;
7707 case 5: /* CLGI */
7708 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7709 goto illegal_op;
7710 if (s->cpl != 0) {
7711 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7712 break;
7713 } else {
7714 tcg_gen_helper_0_0(helper_clgi);
7715 }
7716 break;
7717 case 6: /* SKINIT */
7718 if ((!(s->flags & HF_SVME_MASK) &&
7719 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7720 !s->pe)
7721 goto illegal_op;
7722 tcg_gen_helper_0_0(helper_skinit);
7723 break;
7724 case 7: /* INVLPGA */
7725 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7726 goto illegal_op;
7727 if (s->cpl != 0) {
7728 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7729 break;
7730 } else {
7731 tcg_gen_helper_0_1(helper_invlpga,
7732 tcg_const_i32(s->aflag));
7733 }
7734 break;
7735 default:
7736 goto illegal_op;
7737 }
7738 } else if (s->cpl != 0) {
7739 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7740 } else {
7741 gen_svm_check_intercept(s, pc_start,
7742 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7744 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7745 gen_add_A0_im(s, 2);
7746 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7747 if (!s->dflag)
7748 gen_op_andl_T0_im(0xffffff);
7749 if (op == 2) {
7750 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7751 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7752 } else {
7753 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7754 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7755 }
7756 }
7757 break;
7758 case 4: /* smsw */
7759 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7760 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7761 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7762 break;
7763 case 6: /* lmsw */
7764 if (s->cpl != 0) {
7765 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7766 } else {
7767 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7768 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7769 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
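/* lmsw may change CR0.PE/MP/EM/TS, which are part of the static
   translation state, so the TB must end here */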
7770 gen_jmp_im(s->pc - s->cs_base);
7771 gen_eob(s);
7772 }
7773 break;
7774 case 7: /* invlpg */
7775 if (s->cpl != 0) {
7776 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7777 } else {
7778 if (mod == 3) {
7779#ifdef TARGET_X86_64
7780 if (CODE64(s) && rm == 0) {
7781 /* swapgs */
7782 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7783 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7784 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7785 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7786 } else
7787#endif
7788 {
7789 goto illegal_op;
7790 }
7791 } else {
7792 if (s->cc_op != CC_OP_DYNAMIC)
7793 gen_op_set_cc_op(s->cc_op);
7794 gen_jmp_im(pc_start - s->cs_base);
7795 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7796 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7797 gen_jmp_im(s->pc - s->cs_base);
7798 gen_eob(s);
7799 }
7800 }
7801 break;
7802 default:
7803 goto illegal_op;
7804 }
7805 break;
7806 case 0x108: /* invd */
7807 case 0x109: /* wbinvd */
7808 if (s->cpl != 0) {
7809 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7810 } else {
7811 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7812 /* nothing to do */
7813 }
7814 break;
7815 case 0x63: /* arpl or movslS (x86_64) */
7816#ifdef TARGET_X86_64
7817 if (CODE64(s)) {
7818 int d_ot;
7819 /* d_ot is the size of the destination */
7820 d_ot = dflag + OT_WORD;
7821
7822 modrm = ldub_code(s->pc++);
7823 reg = ((modrm >> 3) & 7) | rex_r;
7824 mod = (modrm >> 6) & 3;
7825 rm = (modrm & 7) | REX_B(s);
7826
7827 if (mod == 3) {
7828 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7829 /* sign extend */
7830 if (d_ot == OT_QUAD)
7831 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7832 gen_op_mov_reg_T0(d_ot, reg);
7833 } else {
7834 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7835 if (d_ot == OT_QUAD) {
7836 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7837 } else {
7838 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7839 }
7840 gen_op_mov_reg_T0(d_ot, reg);
7841 }
7842 } else
7843#endif
7844 {
7845 int label1;
7846 TCGv t0, t1, t2;
7847
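/* arpl: if the destination selector's RPL is below the source's,
   raise it to match and set ZF; t2 carries the new CC_Z bit
   that is merged into eflags below */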
7848 if (!s->pe || s->vm86)
7849 goto illegal_op;
7850 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7851 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7852 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7853 ot = OT_WORD;
7854 modrm = ldub_code(s->pc++);
7855 reg = (modrm >> 3) & 7;
7856 mod = (modrm >> 6) & 3;
7857 rm = modrm & 7;
7858#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
7859 /** @todo: how to do that right? */
7860 //gen_op_mov_TN_reg[ot][1][reg]();
7861#endif
7862 if (mod != 3) {
7863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7864 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7865 } else {
7866 gen_op_mov_v_reg(ot, t0, rm);
7867 }
7868 gen_op_mov_v_reg(ot, t1, reg);
7869 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7870 tcg_gen_andi_tl(t1, t1, 3);
7871 tcg_gen_movi_tl(t2, 0);
7872 label1 = gen_new_label();
7873 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7874 tcg_gen_andi_tl(t0, t0, ~3);
7875 tcg_gen_or_tl(t0, t0, t1);
7876 tcg_gen_movi_tl(t2, CC_Z);
7877 gen_set_label(label1);
7878 if (mod != 3) {
7879 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7880 } else {
7881 gen_op_mov_reg_v(ot, rm, t0);
7882 }
7883 if (s->cc_op != CC_OP_DYNAMIC)
7884 gen_op_set_cc_op(s->cc_op);
7885 gen_compute_eflags(cpu_cc_src);
7886 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7887 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7888 s->cc_op = CC_OP_EFLAGS;
7889 tcg_temp_free(t0);
7890 tcg_temp_free(t1);
7891 tcg_temp_free(t2);
7892 }
7893 break;
7894 case 0x102: /* lar */
7895 case 0x103: /* lsl */
7896 {
7897 int label1;
7898 TCGv t0;
7899 if (!s->pe || s->vm86)
7900 goto illegal_op;
7901 ot = dflag ? OT_LONG : OT_WORD;
7902 modrm = ldub_code(s->pc++);
7903 reg = ((modrm >> 3) & 7) | rex_r;
7904 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7905 t0 = tcg_temp_local_new(TCG_TYPE_TL);
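/* the helper signals success by setting ZF in cc_src; the
   destination register is written only in that case */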
7906 if (s->cc_op != CC_OP_DYNAMIC)
7907 gen_op_set_cc_op(s->cc_op);
7908 if (b == 0x102)
7909 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
7910 else
7911 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
7912 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7913 label1 = gen_new_label();
7914 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7915 gen_op_mov_reg_v(ot, reg, t0);
7916 gen_set_label(label1);
7917 s->cc_op = CC_OP_EFLAGS;
7918 tcg_temp_free(t0);
7919 }
7920 break;
7921 case 0x118:
7922 modrm = ldub_code(s->pc++);
7923 mod = (modrm >> 6) & 3;
7924 op = (modrm >> 3) & 7;
7925 switch(op) {
7926 case 0: /* prefetchnta */
7927 case 1: /* prefetcht0 */
7928 case 2: /* prefetcht1 */
7929 case 3: /* prefetcht2 */
7930 if (mod == 3)
7931 goto illegal_op;
7932 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7933 /* nothing more to do */
7934 break;
7935 default: /* nop (multi byte) */
7936 gen_nop_modrm(s, modrm);
7937 break;
7938 }
7939 break;
7940 case 0x119 ... 0x11f: /* nop (multi byte) */
7941 modrm = ldub_code(s->pc++);
7942 gen_nop_modrm(s, modrm);
7943 break;
7944 case 0x120: /* mov reg, crN */
7945 case 0x122: /* mov crN, reg */
7946 if (s->cpl != 0) {
7947 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7948 } else {
7949 modrm = ldub_code(s->pc++);
7950 if ((modrm & 0xc0) != 0xc0)
7951 goto illegal_op;
7952 rm = (modrm & 7) | REX_B(s);
7953 reg = ((modrm >> 3) & 7) | rex_r;
7954 if (CODE64(s))
7955 ot = OT_QUAD;
7956 else
7957 ot = OT_LONG;
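/* only CR0, CR2, CR3, CR4 and CR8 (the TPR) are handled below;
   a write may change paging or protection state, so the TB is
   ended after it */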
7958 switch(reg) {
7959 case 0:
7960 case 2:
7961 case 3:
7962 case 4:
7963 case 8:
7964 if (s->cc_op != CC_OP_DYNAMIC)
7965 gen_op_set_cc_op(s->cc_op);
7966 gen_jmp_im(pc_start - s->cs_base);
7967 if (b & 2) {
7968 gen_op_mov_TN_reg(ot, 0, rm);
7969 tcg_gen_helper_0_2(helper_write_crN,
7970 tcg_const_i32(reg), cpu_T[0]);
7971 gen_jmp_im(s->pc - s->cs_base);
7972 gen_eob(s);
7973 } else {
7974 tcg_gen_helper_1_1(helper_read_crN,
7975 cpu_T[0], tcg_const_i32(reg));
7976 gen_op_mov_reg_T0(ot, rm);
7977 }
7978 break;
7979 default:
7980 goto illegal_op;
7981 }
7982 }
7983 break;
7984 case 0x121: /* mov reg, drN */
7985 case 0x123: /* mov drN, reg */
7986 if (s->cpl != 0) {
7987 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7988 } else {
7989 modrm = ldub_code(s->pc++);
7990 if ((modrm & 0xc0) != 0xc0)
7991 goto illegal_op;
7992 rm = (modrm & 7) | REX_B(s);
7993 reg = ((modrm >> 3) & 7) | rex_r;
7994 if (CODE64(s))
7995 ot = OT_QUAD;
7996 else
7997 ot = OT_LONG;
7998 /* XXX: do it dynamically with CR4.DE bit */
7999 if (reg == 4 || reg == 5 || reg >= 8)
8000 goto illegal_op;
8001 if (b & 2) {
8002 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8003 gen_op_mov_TN_reg(ot, 0, rm);
8004 tcg_gen_helper_0_2(helper_movl_drN_T0,
8005 tcg_const_i32(reg), cpu_T[0]);
8006 gen_jmp_im(s->pc - s->cs_base);
8007 gen_eob(s);
8008 } else {
8009 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8010 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8011 gen_op_mov_reg_T0(ot, rm);
8012 }
8013 }
8014 break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
            } else {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
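            /* The two TCG sequences above amount to (sketch of the
             * guest-visible semantics only):
             *   ldmxcsr:  env->mxcsr = *(uint32_t *)addr;
             *   stmxcsr:  *(uint32_t *)addr = env->mxcsr;
             */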
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
    case 0x1b8: /* SSE4.2 popcnt */
        if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
             PREFIX_REPZ)
            goto illegal_op;
        if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
            goto illegal_op;

        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7);

        if (s->prefix & PREFIX_DATA)
            ot = OT_WORD;
        else if (s->dflag != 2)
            ot = OT_LONG;
        else
            ot = OT_QUAD;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        tcg_gen_helper_1_2(helper_popcnt,
                           cpu_T[0], cpu_T[0], tcg_const_i32(ot));
        gen_op_mov_reg_T0(ot, reg);

        s->cc_op = CC_OP_EFLAGS;
        break;
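    /* helper_popcnt (implemented in the helper code) returns the number of
     * set bits in the operand, with the count restricted to the operand
     * size passed in 'ot'. A portable sketch of the same computation
     * (illustration only; the function name is hypothetical and this is
     * not the helper's actual body):
     */
#if 0
    static int popcnt64_sketch(uint64_t v)
    {
        int n = 0;
        while (v) {
            v &= v - 1; /* clear the lowest set bit */
            n++;
        }
        return n;
    }
#endif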
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
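        /* fall through */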
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x138 ... 0x13a:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* release any bus lock taken for a LOCK prefix */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}

void optimize_flags_init(void)
{
#ifndef VBOX
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
#endif
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */

#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
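/* Redefining DEF_HELPER right before re-including helper.h turns every
 * helper prototype in that header into a tcg_register_helper() call; for
 * example, a declaration of the form
 *     DEF_HELPER(void, helper_rsm, (void))
 * would expand here to
 *     tcg_register_helper(helper_rsm, "helper_rsm");
 */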

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
#ifndef VBOX
static inline void gen_intermediate_code_internal(CPUState *env,
#else /* VBOX */
DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
#endif /* VBOX */
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
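    /* mem_index thus encodes the softmmu MMU index: 0 when softmmu is not
     * active (raw host access), 1 * 4 for kernel mode and 2 * 4 for user
     * mode (cpl == 3); the factor of 4 leaves room for the operand size,
     * which the load/store ops add in (e.g. OT_LONG + s->mem_index). */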
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
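    /* tb->cflags carries the icount instruction budget for this block in
     * CF_COUNT_MASK; a budget of zero means "no limit", expressed here by
     * capping at CF_COUNT_MASK itself. */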

    gen_icount_start();
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* in single step mode, we generate only one instruction and
           then raise an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* also stop if the translation is getting too long */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill in the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }
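    /* gen_opc_instr_start[] flags the ops that begin a guest instruction;
     * the loop above zeroes the slots left over after the last instruction
     * so that later PC lookups (see gen_pc_load() below) operate on a
     * consistent table. */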

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}

void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for (i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}