VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@ 13610

Last change on this file since 13610 was 13600, committed by vboxsync 16 years ago

simplified JAX-WS wrapper generation, fixed SDK build

  • Property svn:eol-style set to native
File size: 273.7 KB
 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
45#define PREFIX_REPZ 0x01
46#define PREFIX_REPNZ 0x02
47#define PREFIX_LOCK 0x04
48#define PREFIX_DATA 0x08
49#define PREFIX_ADR 0x10
50
51#ifdef TARGET_X86_64
52#define X86_64_ONLY(x) x
53#ifndef VBOX
54#define X86_64_DEF(x...) x
55#else
56#define X86_64_DEF(x...) x
57#endif
58#define CODE64(s) ((s)->code64)
59#define REX_X(s) ((s)->rex_x)
60#define REX_B(s) ((s)->rex_b)
61/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
62#if 1
63#define BUGGY_64(x) NULL
64#endif
65#else
66#define X86_64_ONLY(x) NULL
67#ifndef VBOX
68#define X86_64_DEF(x...)
69#else
70#define X86_64_DEF(x)
71#endif
72#define CODE64(s) 0
73#define REX_X(s) 0
74#define REX_B(s) 0
75#endif
76
77//#define MACRO_TEST 1
78
79/* global register indexes */
80static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
81/* local temps */
82static TCGv cpu_T[2], cpu_T3;
83/* local register indexes (only used inside old micro ops) */
84static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
85static TCGv cpu_tmp5, cpu_tmp6;
86
87#include "gen-icount.h"
88
89#ifdef TARGET_X86_64
90static int x86_64_hregs;
91#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
97uint8_t ldub_code_raw(target_ulong pc)
98{
99 uint8_t b;
100
101 if (!remR3GetOpcode(cpu_single_env, pc, &b))
102 b = ldub_code(pc);
103 return b;
104}
105#define ldub_code(a) ldub_code_raw(a)
106
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
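/* Note: lduw_code_raw() and ldl_code_raw() deliberately assemble wider
   fetches from single-byte ldub_code() reads (least significant byte
   first), so every byte of a multi-byte opcode fetch is screened through
   the remR3GetOpcode() override even when it straddles a patched range. */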
121
122
123typedef struct DisasContext {
124 /* current insn context */
125 int override; /* -1 if no override */
126 int prefix;
127 int aflag, dflag;
128 target_ulong pc; /* pc = eip + cs_base */
129 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
130 static state change (stop translation) */
131 /* current block context */
132 target_ulong cs_base; /* base of CS segment */
133 int pe; /* protected mode */
134 int code32; /* 32 bit code segment */
135#ifdef TARGET_X86_64
136 int lma; /* long mode active */
137 int code64; /* 64 bit code segment */
138 int rex_x, rex_b;
139#endif
140 int ss32; /* 32 bit stack segment */
141 int cc_op; /* current CC operation */
142 int addseg; /* non zero if either DS/ES/SS have a non zero base */
143 int f_st; /* currently unused */
144 int vm86; /* vm86 mode */
145#ifdef VBOX
146 int vme; /* CR4.VME */
147 int record_call; /* record calls for CSAM or not? */
148#endif
149 int cpl;
150 int iopl;
151 int tf; /* TF cpu flag */
152 int singlestep_enabled; /* "hardware" single step enabled */
153 int jmp_opt; /* use direct block chaining for direct jumps */
154 int mem_index; /* select memory access functions */
155 uint64_t flags; /* all execution flags */
156 struct TranslationBlock *tb;
157 int popl_esp_hack; /* for correct popl with esp base handling */
158 int rip_offset; /* only used in x86_64, but left for simplicity */
159 int cpuid_features;
160 int cpuid_ext_features;
161 int cpuid_ext2_features;
162 int cpuid_ext3_features;
163} DisasContext;
164
165static void gen_eob(DisasContext *s);
166static void gen_jmp(DisasContext *s, target_ulong eip);
167static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
168
169#ifdef VBOX
170static void gen_check_external_event(void);
171#endif
172
173/* i386 arith/logic operations */
174enum {
175 OP_ADDL,
176 OP_ORL,
177 OP_ADCL,
178 OP_SBBL,
179 OP_ANDL,
180 OP_SUBL,
181 OP_XORL,
182 OP_CMPL,
183};
184
185/* i386 shift ops */
186enum {
187 OP_ROL,
188 OP_ROR,
189 OP_RCL,
190 OP_RCR,
191 OP_SHL,
192 OP_SHR,
193 OP_SHL1, /* undocumented */
194 OP_SAR = 7,
195};
196
197enum {
198 JCC_O,
199 JCC_B,
200 JCC_Z,
201 JCC_BE,
202 JCC_S,
203 JCC_P,
204 JCC_L,
205 JCC_LE,
206};
207
208/* operand size */
209enum {
210 OT_BYTE = 0,
211 OT_WORD,
212 OT_LONG,
213 OT_QUAD,
214};
215
216enum {
217 /* I386 int registers */
218 OR_EAX, /* MUST be even numbered */
219 OR_ECX,
220 OR_EDX,
221 OR_EBX,
222 OR_ESP,
223 OR_EBP,
224 OR_ESI,
225 OR_EDI,
226
227 OR_TMP0 = 16, /* temporary operand register */
228 OR_TMP1,
229 OR_A0, /* temporary register used when doing address evaluation */
230};
231
232#ifndef VBOX
233static inline void gen_op_movl_T0_0(void)
234#else /* VBOX */
235DECLINLINE(void) gen_op_movl_T0_0(void)
236#endif /* VBOX */
237{
238 tcg_gen_movi_tl(cpu_T[0], 0);
239}
240
241#ifndef VBOX
242static inline void gen_op_movl_T0_im(int32_t val)
243#else /* VBOX */
244DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
245#endif /* VBOX */
246{
247 tcg_gen_movi_tl(cpu_T[0], val);
248}
249
250#ifndef VBOX
251static inline void gen_op_movl_T0_imu(uint32_t val)
252#else /* VBOX */
253DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
254#endif /* VBOX */
255{
256 tcg_gen_movi_tl(cpu_T[0], val);
257}
258
259#ifndef VBOX
260static inline void gen_op_movl_T1_im(int32_t val)
261#else /* VBOX */
262DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
263#endif /* VBOX */
264{
265 tcg_gen_movi_tl(cpu_T[1], val);
266}
267
268#ifndef VBOX
269static inline void gen_op_movl_T1_imu(uint32_t val)
270#else /* VBOX */
271DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
272#endif /* VBOX */
273{
274 tcg_gen_movi_tl(cpu_T[1], val);
275}
276
277#ifndef VBOX
278static inline void gen_op_movl_A0_im(uint32_t val)
279#else /* VBOX */
280DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
281#endif /* VBOX */
282{
283 tcg_gen_movi_tl(cpu_A0, val);
284}
285
286#ifdef TARGET_X86_64
287#ifndef VBOX
288static inline void gen_op_movq_A0_im(int64_t val)
289#else /* VBOX */
290DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
291#endif /* VBOX */
292{
293 tcg_gen_movi_tl(cpu_A0, val);
294}
295#endif
296
297#ifndef VBOX
298static inline void gen_movtl_T0_im(target_ulong val)
299#else /* VBOX */
300DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
301#endif /* VBOX */
302{
303 tcg_gen_movi_tl(cpu_T[0], val);
304}
305
306#ifndef VBOX
307static inline void gen_movtl_T1_im(target_ulong val)
308#else /* VBOX */
309DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
310#endif /* VBOX */
311{
312 tcg_gen_movi_tl(cpu_T[1], val);
313}
314
315#ifndef VBOX
316static inline void gen_op_andl_T0_ffff(void)
317#else /* VBOX */
318DECLINLINE(void) gen_op_andl_T0_ffff(void)
319#endif /* VBOX */
320{
321 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
322}
323
324#ifndef VBOX
325static inline void gen_op_andl_T0_im(uint32_t val)
326#else /* VBOX */
327DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
328#endif /* VBOX */
329{
330 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
331}
332
333#ifndef VBOX
334static inline void gen_op_movl_T0_T1(void)
335#else /* VBOX */
336DECLINLINE(void) gen_op_movl_T0_T1(void)
337#endif /* VBOX */
338{
339 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
340}
341
342#ifndef VBOX
343static inline void gen_op_andl_A0_ffff(void)
344#else /* VBOX */
345DECLINLINE(void) gen_op_andl_A0_ffff(void)
346#endif /* VBOX */
347{
348 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
349}
350
351#ifdef TARGET_X86_64
352
353#define NB_OP_SIZES 4
354
355#else /* !TARGET_X86_64 */
356
357#define NB_OP_SIZES 3
358
359#endif /* !TARGET_X86_64 */
360
361#if defined(WORDS_BIGENDIAN)
362#define REG_B_OFFSET (sizeof(target_ulong) - 1)
363#define REG_H_OFFSET (sizeof(target_ulong) - 2)
364#define REG_W_OFFSET (sizeof(target_ulong) - 2)
365#define REG_L_OFFSET (sizeof(target_ulong) - 4)
366#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
367#else
368#define REG_B_OFFSET 0
369#define REG_H_OFFSET 1
370#define REG_W_OFFSET 0
371#define REG_L_OFFSET 0
372#define REG_LH_OFFSET 4
373#endif
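/* These offsets locate sub-registers inside the target_ulong slots of
   CPUState.regs[]. On a little-endian host AL sits at byte 0 of the EAX
   slot and AH at byte 1; on a big-endian host the same bytes sit at the
   opposite end of the 4- or 8-byte slot, hence the sizeof(target_ulong)
   arithmetic above. */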
374
375#ifndef VBOX
376static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
377#else /* VBOX */
378DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
379#endif /* VBOX */
380{
381 switch(ot) {
382 case OT_BYTE:
383 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
384 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
385 } else {
386 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
387 }
388 break;
389 case OT_WORD:
390 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
391 break;
392#ifdef TARGET_X86_64
393 case OT_LONG:
394 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
395 /* high part of register set to zero */
396 tcg_gen_movi_tl(cpu_tmp0, 0);
397 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
398 break;
399 default:
400 case OT_QUAD:
401 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
402 break;
403#else
404 default:
405 case OT_LONG:
406 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
407 break;
408#endif
409 }
410}
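/* In the TARGET_X86_64 build the OT_LONG case above mirrors the
   architectural rule that a 32-bit register write zero-extends into the
   full 64-bit register: the low half is stored and the high half
   (REG_LH_OFFSET) is explicitly cleared. */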
411
412#ifndef VBOX
413static inline void gen_op_mov_reg_T0(int ot, int reg)
414#else /* VBOX */
415DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
416#endif /* VBOX */
417{
418 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
419}
420
421#ifndef VBOX
422static inline void gen_op_mov_reg_T1(int ot, int reg)
423#else /* VBOX */
424DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
425#endif /* VBOX */
426{
427 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
428}
429
430#ifndef VBOX
431static inline void gen_op_mov_reg_A0(int size, int reg)
432#else /* VBOX */
433DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
434#endif /* VBOX */
435{
436 switch(size) {
437 case 0:
438 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
439 break;
440#ifdef TARGET_X86_64
441 case 1:
442 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
443 /* high part of register set to zero */
444 tcg_gen_movi_tl(cpu_tmp0, 0);
445 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
446 break;
447 default:
448 case 2:
449 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
450 break;
451#else
452 default:
453 case 1:
454 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
455 break;
456#endif
457 }
458}
459
460#ifndef VBOX
461static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
462#else /* VBOX */
463DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
464#endif /* VBOX */
465{
466 switch(ot) {
467 case OT_BYTE:
468 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
469 goto std_case;
470 } else {
471 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
472 }
473 break;
474 default:
475 std_case:
476 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
477 break;
478 }
479}
480
481#ifndef VBOX
482static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
483#else /* VBOX */
484DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
485#endif /* VBOX */
486{
487 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
488}
489
490#ifndef VBOX
491static inline void gen_op_movl_A0_reg(int reg)
492#else /* VBOX */
493DECLINLINE(void) gen_op_movl_A0_reg(int reg)
494#endif /* VBOX */
495{
496 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
497}
498
499#ifndef VBOX
500static inline void gen_op_addl_A0_im(int32_t val)
501#else /* VBOX */
502DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
503#endif /* VBOX */
504{
505 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
506#ifdef TARGET_X86_64
507 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
508#endif
509}
510
511#ifdef TARGET_X86_64
512#ifndef VBOX
513static inline void gen_op_addq_A0_im(int64_t val)
514#else /* VBOX */
515DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
516#endif /* VBOX */
517{
518 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
519}
520#endif
521
522static void gen_add_A0_im(DisasContext *s, int val)
523{
524#ifdef TARGET_X86_64
525 if (CODE64(s))
526 gen_op_addq_A0_im(val);
527 else
528#endif
529 gen_op_addl_A0_im(val);
530}
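/* Outside 64-bit code the address temporary A0 must wrap like a 32-bit
   quantity, so gen_op_addl_A0_im() masks with 0xffffffff whenever
   target_ulong is 64 bits wide; gen_op_addq_A0_im() needs no mask since
   64-bit addresses wrap naturally in host arithmetic. */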
531
532#ifndef VBOX
533static inline void gen_op_addl_T0_T1(void)
534#else /* VBOX */
535DECLINLINE(void) gen_op_addl_T0_T1(void)
536#endif /* VBOX */
537{
538 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
539}
540
541#ifndef VBOX
542static inline void gen_op_jmp_T0(void)
543#else /* VBOX */
544DECLINLINE(void) gen_op_jmp_T0(void)
545#endif /* VBOX */
546{
547 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
548}
549
550#ifndef VBOX
551static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
552#else /* VBOX */
553DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
554#endif /* VBOX */
555{
556 switch(size) {
557 case 0:
558 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
559 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
560 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
561 break;
562 case 1:
563 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
564 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
565#ifdef TARGET_X86_64
566 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
567#endif
568 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
569 break;
570#ifdef TARGET_X86_64
571 case 2:
572 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
573 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
574 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
575 break;
576#endif
577 }
578}
579
580#ifndef VBOX
581static inline void gen_op_add_reg_T0(int size, int reg)
582#else /* VBOX */
583DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
584#endif /* VBOX */
585{
586 switch(size) {
587 case 0:
588 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
589 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
590 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
591 break;
592 case 1:
593 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
594 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
595#ifdef TARGET_X86_64
596 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
597#endif
598 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
599 break;
600#ifdef TARGET_X86_64
601 case 2:
602 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
603 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
604 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
605 break;
606#endif
607 }
608}
609
610#ifndef VBOX
611static inline void gen_op_set_cc_op(int32_t val)
612#else /* VBOX */
613DECLINLINE(void) gen_op_set_cc_op(int32_t val)
614#endif /* VBOX */
615{
616 tcg_gen_movi_i32(cpu_cc_op, val);
617}
618
619#ifndef VBOX
620static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
621#else /* VBOX */
622DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
623#endif /* VBOX */
624{
625 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
626 if (shift != 0)
627 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
628 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
629#ifdef TARGET_X86_64
630 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
631#endif
632}
633
634#ifndef VBOX
635static inline void gen_op_movl_A0_seg(int reg)
636#else /* VBOX */
637DECLINLINE(void) gen_op_movl_A0_seg(int reg)
638#endif /* VBOX */
639{
640 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
641}
642
643#ifndef VBOX
644static inline void gen_op_addl_A0_seg(int reg)
645#else /* VBOX */
646DECLINLINE(void) gen_op_addl_A0_seg(int reg)
647#endif /* VBOX */
648{
649 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
650 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
651#ifdef TARGET_X86_64
652 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
653#endif
654}
655
656#ifdef TARGET_X86_64
657#ifndef VBOX
658static inline void gen_op_movq_A0_seg(int reg)
659#else /* VBOX */
660DECLINLINE(void) gen_op_movq_A0_seg(int reg)
661#endif /* VBOX */
662{
663 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
664}
665
666#ifndef VBOX
667static inline void gen_op_addq_A0_seg(int reg)
668#else /* VBOX */
669DECLINLINE(void) gen_op_addq_A0_seg(int reg)
670#endif /* VBOX */
671{
672 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
673 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
674}
675
676#ifndef VBOX
677static inline void gen_op_movq_A0_reg(int reg)
678#else /* VBOX */
679DECLINLINE(void) gen_op_movq_A0_reg(int reg)
680#endif /* VBOX */
681{
682 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
683}
684
685#ifndef VBOX
686static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
687#else /* VBOX */
688DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
689#endif /* VBOX */
690{
691 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
692 if (shift != 0)
693 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
694 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
695}
696#endif
697
698#ifndef VBOX
699static inline void gen_op_lds_T0_A0(int idx)
700#else /* VBOX */
701DECLINLINE(void) gen_op_lds_T0_A0(int idx)
702#endif /* VBOX */
703{
704 int mem_index = (idx >> 2) - 1;
705 switch(idx & 3) {
706 case 0:
707 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
708 break;
709 case 1:
710 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
711 break;
712 default:
713 case 2:
714 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
715 break;
716 }
717}
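/* The 'idx' parameter of these load/store helpers packs two values:
   bits 0-1 carry the operand size (OT_BYTE..OT_QUAD) and the upper bits
   carry mem_index + 1. Callers therefore pass 'ot + s->mem_index', and
   the helpers recover the softmmu access index as (idx >> 2) - 1. */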
718
719#ifndef VBOX
720static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
721#else /* VBOX */
722DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
723#endif /* VBOX */
724{
725 int mem_index = (idx >> 2) - 1;
726 switch(idx & 3) {
727 case 0:
728 tcg_gen_qemu_ld8u(t0, a0, mem_index);
729 break;
730 case 1:
731 tcg_gen_qemu_ld16u(t0, a0, mem_index);
732 break;
733 case 2:
734 tcg_gen_qemu_ld32u(t0, a0, mem_index);
735 break;
736 default:
737 case 3:
738 tcg_gen_qemu_ld64(t0, a0, mem_index);
739 break;
740 }
741}
742
743/* XXX: always use ldu or lds */
744#ifndef VBOX
745static inline void gen_op_ld_T0_A0(int idx)
746#else /* VBOX */
747DECLINLINE(void) gen_op_ld_T0_A0(int idx)
748#endif /* VBOX */
749{
750 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
751}
752
753#ifndef VBOX
754static inline void gen_op_ldu_T0_A0(int idx)
755#else /* VBOX */
756DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
757#endif /* VBOX */
758{
759 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
760}
761
762#ifndef VBOX
763static inline void gen_op_ld_T1_A0(int idx)
764#else /* VBOX */
765DECLINLINE(void) gen_op_ld_T1_A0(int idx)
766#endif /* VBOX */
767{
768 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
769}
770
771#ifndef VBOX
772static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
773#else /* VBOX */
774DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
775#endif /* VBOX */
776{
777 int mem_index = (idx >> 2) - 1;
778 switch(idx & 3) {
779 case 0:
780 tcg_gen_qemu_st8(t0, a0, mem_index);
781 break;
782 case 1:
783 tcg_gen_qemu_st16(t0, a0, mem_index);
784 break;
785 case 2:
786 tcg_gen_qemu_st32(t0, a0, mem_index);
787 break;
788 default:
789 case 3:
790 tcg_gen_qemu_st64(t0, a0, mem_index);
791 break;
792 }
793}
794
795#ifndef VBOX
796static inline void gen_op_st_T0_A0(int idx)
797#else /* VBOX */
798DECLINLINE(void) gen_op_st_T0_A0(int idx)
799#endif /* VBOX */
800{
801 gen_op_st_v(idx, cpu_T[0], cpu_A0);
802}
803
804#ifndef VBOX
805static inline void gen_op_st_T1_A0(int idx)
806#else /* VBOX */
807DECLINLINE(void) gen_op_st_T1_A0(int idx)
808#endif /* VBOX */
809{
810 gen_op_st_v(idx, cpu_T[1], cpu_A0);
811}
812
813#ifndef VBOX
814static inline void gen_jmp_im(target_ulong pc)
815#else /* VBOX */
816DECLINLINE(void) gen_jmp_im(target_ulong pc)
817#endif /* VBOX */
818{
819#ifdef VBOX
820 gen_check_external_event();
821#endif /* VBOX */
822 tcg_gen_movi_tl(cpu_tmp0, pc);
823 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
824}
825
826#ifdef VBOX
827static void gen_check_external_event(void)
828{
829#if 0
830 /** @todo: nike make it work */
831 /* This code is more efficient, but for whatever reason TCG fails to compile it */
832 int skip_label;
833 TCGv t0;
834
835 skip_label = gen_new_label();
836 t0 = tcg_temp_local_new(TCG_TYPE_TL);
837 /* t0 = cpu_tmp0; */
838
839 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
840 /* Keep in sync with helper_check_external_event() */
841 tcg_gen_andi_tl(t0, t0,
842 CPU_INTERRUPT_EXTERNAL_EXIT
843 | CPU_INTERRUPT_EXTERNAL_TIMER
844 | CPU_INTERRUPT_EXTERNAL_DMA
845 | CPU_INTERRUPT_EXTERNAL_HARD);
846 tcg_gen_brcond_i32(TCG_COND_EQ, t0, 0, skip_label);
847 tcg_temp_free(t0);
848
849 tcg_gen_helper_0_0(helper_check_external_event);
850
851 gen_set_label(skip_label);
852#else
853 tcg_gen_helper_0_0(helper_check_external_event);
854#endif
855}
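/* A guess at why the #if 0 variant above fails to build: it passes a
   literal 0 as the second operand of tcg_gen_brcond_i32(), which expects
   a TCGv (tcg_gen_brcondi_i32 is the immediate form), and t0 is created
   as TCG_TYPE_TL rather than a 32-bit temp. Speculation from reading the
   code, not a confirmed diagnosis. */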
856
857#ifndef VBOX
858static inline void gen_update_eip(target_ulong pc)
859#else /* VBOX */
860DECLINLINE(void) gen_update_eip(target_ulong pc)
861#endif /* VBOX */
862{
863 gen_jmp_im(pc);
864
865}
866#endif
867
868#ifndef VBOX
869static inline void gen_string_movl_A0_ESI(DisasContext *s)
870#else /* VBOX */
871DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
872#endif /* VBOX */
873{
874 int override;
875
876 override = s->override;
877#ifdef TARGET_X86_64
878 if (s->aflag == 2) {
879 if (override >= 0) {
880 gen_op_movq_A0_seg(override);
881 gen_op_addq_A0_reg_sN(0, R_ESI);
882 } else {
883 gen_op_movq_A0_reg(R_ESI);
884 }
885 } else
886#endif
887 if (s->aflag) {
888 /* 32 bit address */
889 if (s->addseg && override < 0)
890 override = R_DS;
891 if (override >= 0) {
892 gen_op_movl_A0_seg(override);
893 gen_op_addl_A0_reg_sN(0, R_ESI);
894 } else {
895 gen_op_movl_A0_reg(R_ESI);
896 }
897 } else {
898 /* 16 bit address, always override */
899 if (override < 0)
900 override = R_DS;
901 gen_op_movl_A0_reg(R_ESI);
902 gen_op_andl_A0_ffff();
903 gen_op_addl_A0_seg(override);
904 }
905}
906
907#ifndef VBOX
908static inline void gen_string_movl_A0_EDI(DisasContext *s)
909#else /* VBOX */
910DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
911#endif /* VBOX */
912{
913#ifdef TARGET_X86_64
914 if (s->aflag == 2) {
915 gen_op_movq_A0_reg(R_EDI);
916 } else
917#endif
918 if (s->aflag) {
919 if (s->addseg) {
920 gen_op_movl_A0_seg(R_ES);
921 gen_op_addl_A0_reg_sN(0, R_EDI);
922 } else {
923 gen_op_movl_A0_reg(R_EDI);
924 }
925 } else {
926 gen_op_movl_A0_reg(R_EDI);
927 gen_op_andl_A0_ffff();
928 gen_op_addl_A0_seg(R_ES);
929 }
930}
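/* Architecturally the destination of string instructions is always
   ES:EDI and ignores segment override prefixes, which is why
   gen_string_movl_A0_EDI() takes no override while the ESI-based source
   side honours s->override. */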
931
932#ifndef VBOX
933static inline void gen_op_movl_T0_Dshift(int ot)
934#else /* VBOX */
935DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
936#endif /* VBOX */
937{
938 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
939 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
940}
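/* env->df stores the direction flag as +1/-1 rather than as an EFLAGS
   bit, so shifting it left by 'ot' yields the per-iteration string
   increment directly: e.g. ot == OT_WORD (1) with DF set gives T0 = -2. */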
941
942static void gen_extu(int ot, TCGv reg)
943{
944 switch(ot) {
945 case OT_BYTE:
946 tcg_gen_ext8u_tl(reg, reg);
947 break;
948 case OT_WORD:
949 tcg_gen_ext16u_tl(reg, reg);
950 break;
951 case OT_LONG:
952 tcg_gen_ext32u_tl(reg, reg);
953 break;
954 default:
955 break;
956 }
957}
958
959static void gen_exts(int ot, TCGv reg)
960{
961 switch(ot) {
962 case OT_BYTE:
963 tcg_gen_ext8s_tl(reg, reg);
964 break;
965 case OT_WORD:
966 tcg_gen_ext16s_tl(reg, reg);
967 break;
968 case OT_LONG:
969 tcg_gen_ext32s_tl(reg, reg);
970 break;
971 default:
972 break;
973 }
974}
975
976#ifndef VBOX
977static inline void gen_op_jnz_ecx(int size, int label1)
978#else /* VBOX */
979DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
980#endif /* VBOX */
981{
982 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
983 gen_extu(size + 1, cpu_tmp0);
984 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
985}
986
987#ifndef VBOX
988static inline void gen_op_jz_ecx(int size, int label1)
989#else /* VBOX */
990DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
991#endif /* VBOX */
992{
993 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
994 gen_extu(size + 1, cpu_tmp0);
995 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
996}
997
998static void *helper_in_func[3] = {
999 helper_inb,
1000 helper_inw,
1001 helper_inl,
1002};
1003
1004static void *helper_out_func[3] = {
1005 helper_outb,
1006 helper_outw,
1007 helper_outl,
1008};
1009
1010static void *gen_check_io_func[3] = {
1011 helper_check_iob,
1012 helper_check_iow,
1013 helper_check_iol,
1014};
1015
1016static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
1017 uint32_t svm_flags)
1018{
1019 int state_saved;
1020 target_ulong next_eip;
1021
1022 state_saved = 0;
1023 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1024 if (s->cc_op != CC_OP_DYNAMIC)
1025 gen_op_set_cc_op(s->cc_op);
1026 gen_jmp_im(cur_eip);
1027 state_saved = 1;
1028 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1029 tcg_gen_helper_0_1(gen_check_io_func[ot],
1030 cpu_tmp2_i32);
1031 }
1032 if (s->flags & HF_SVMI_MASK) {
1033 if (!state_saved) {
1034 if (s->cc_op != CC_OP_DYNAMIC)
1035 gen_op_set_cc_op(s->cc_op);
1036 gen_jmp_im(cur_eip);
1037 state_saved = 1;
1038 }
1039 svm_flags |= (1 << (4 + ot));
1040 next_eip = s->pc - s->cs_base;
1041 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1042 tcg_gen_helper_0_3(helper_svm_check_io,
1043 cpu_tmp2_i32,
1044 tcg_const_i32(svm_flags),
1045 tcg_const_i32(next_eip - cur_eip));
1046 }
1047}
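/* Two independent checks may apply to an IN/OUT: the protected-mode
   permission check (helper_check_iob/w/l consult the TSS I/O bitmap when
   CPL > IOPL or in vm86 mode) and, under SVM (HF_SVMI_MASK), the IOIO
   intercept, where (1 << (4 + ot)) appears to encode the access size
   (SZ8/SZ16/SZ32) in the exit information given to helper_svm_check_io. */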
1048
1049#ifndef VBOX
1050static inline void gen_movs(DisasContext *s, int ot)
1051#else /* VBOX */
1052DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1053#endif /* VBOX */
1054{
1055 gen_string_movl_A0_ESI(s);
1056 gen_op_ld_T0_A0(ot + s->mem_index);
1057 gen_string_movl_A0_EDI(s);
1058 gen_op_st_T0_A0(ot + s->mem_index);
1059 gen_op_movl_T0_Dshift(ot);
1060 gen_op_add_reg_T0(s->aflag, R_ESI);
1061 gen_op_add_reg_T0(s->aflag, R_EDI);
1062}
1063
1064#ifndef VBOX
1065static inline void gen_update_cc_op(DisasContext *s)
1066#else /* VBOX */
1067DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1068#endif /* VBOX */
1069{
1070 if (s->cc_op != CC_OP_DYNAMIC) {
1071 gen_op_set_cc_op(s->cc_op);
1072 s->cc_op = CC_OP_DYNAMIC;
1073 }
1074}
1075
1076static void gen_op_update1_cc(void)
1077{
1078 tcg_gen_discard_tl(cpu_cc_src);
1079 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1080}
1081
1082static void gen_op_update2_cc(void)
1083{
1084 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1085 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1086}
1087
1088#ifndef VBOX
1089static inline void gen_op_cmpl_T0_T1_cc(void)
1090#else /* VBOX */
1091DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1092#endif /* VBOX */
1093{
1094 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1095 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1096}
1097
1098#ifndef VBOX
1099static inline void gen_op_testl_T0_T1_cc(void)
1100#else /* VBOX */
1101DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1102#endif /* VBOX */
1103{
1104 tcg_gen_discard_tl(cpu_cc_src);
1105 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1106}
1107
1108static void gen_op_update_neg_cc(void)
1109{
1110 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1111 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1112}
1113
1114/* compute eflags.C to reg */
1115static void gen_compute_eflags_c(TCGv reg)
1116{
1117#if TCG_TARGET_REG_BITS == 32
1118 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1119 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1120 (long)cc_table + offsetof(CCTable, compute_c));
1121 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1122 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1123 1, &cpu_tmp2_i32, 0, NULL);
1124#else
1125 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1126 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1127 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1128 (long)cc_table + offsetof(CCTable, compute_c));
1129 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1130 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1131 1, &cpu_tmp2_i32, 0, NULL);
1132#endif
1133 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1134}
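/* Lazy-flags dispatch: cc_op indexes the global cc_table[], whose entries
   pair {compute_all, compute_c} function pointers. The differing shift
   counts scale the index by sizeof(CCTable) on each host: 8-byte entries
   (two 32-bit pointers, shift 3) versus 16-byte entries (two 64-bit
   pointers, shift 4). The selected pointer is then invoked via
   tcg_gen_call(). gen_compute_eflags() below uses the same scheme. */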
1135
1136/* compute all eflags into 'reg' */
1137static void gen_compute_eflags(TCGv reg)
1138{
1139#if TCG_TARGET_REG_BITS == 32
1140 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1141 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1142 (long)cc_table + offsetof(CCTable, compute_all));
1143 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1144 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1145 1, &cpu_tmp2_i32, 0, NULL);
1146#else
1147 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1148 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1149 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1150 (long)cc_table + offsetof(CCTable, compute_all));
1151 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1152 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1153 1, &cpu_tmp2_i32, 0, NULL);
1154#endif
1155 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1156}
1157
1158#ifndef VBOX
1159static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1160#else /* VBOX */
1161DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1162#endif /* VBOX */
1163{
1164 if (s->cc_op != CC_OP_DYNAMIC)
1165 gen_op_set_cc_op(s->cc_op);
1166 switch(jcc_op) {
1167 case JCC_O:
1168 gen_compute_eflags(cpu_T[0]);
1169 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1170 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1171 break;
1172 case JCC_B:
1173 gen_compute_eflags_c(cpu_T[0]);
1174 break;
1175 case JCC_Z:
1176 gen_compute_eflags(cpu_T[0]);
1177 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1178 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1179 break;
1180 case JCC_BE:
1181 gen_compute_eflags(cpu_tmp0);
1182 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1183 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1184 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1185 break;
1186 case JCC_S:
1187 gen_compute_eflags(cpu_T[0]);
1188 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1189 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1190 break;
1191 case JCC_P:
1192 gen_compute_eflags(cpu_T[0]);
1193 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1194 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1195 break;
1196 case JCC_L:
1197 gen_compute_eflags(cpu_tmp0);
1198 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1199 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1200 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1201 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1202 break;
1203 default:
1204 case JCC_LE:
1205 gen_compute_eflags(cpu_tmp0);
1206 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1207 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1208 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1209 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1210 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1211 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1212 break;
1213 }
1214}
1215
1216/* return true if setcc_slow is not needed (WARNING: must be kept in
1217 sync with gen_jcc1) */
1218static int is_fast_jcc_case(DisasContext *s, int b)
1219{
1220 int jcc_op;
1221 jcc_op = (b >> 1) & 7;
1222 switch(s->cc_op) {
1223 /* we optimize the cmp/jcc case */
1224 case CC_OP_SUBB:
1225 case CC_OP_SUBW:
1226 case CC_OP_SUBL:
1227 case CC_OP_SUBQ:
1228 if (jcc_op == JCC_O || jcc_op == JCC_P)
1229 goto slow_jcc;
1230 break;
1231
1232 /* some jumps are easy to compute */
1233 case CC_OP_ADDB:
1234 case CC_OP_ADDW:
1235 case CC_OP_ADDL:
1236 case CC_OP_ADDQ:
1237
1238 case CC_OP_LOGICB:
1239 case CC_OP_LOGICW:
1240 case CC_OP_LOGICL:
1241 case CC_OP_LOGICQ:
1242
1243 case CC_OP_INCB:
1244 case CC_OP_INCW:
1245 case CC_OP_INCL:
1246 case CC_OP_INCQ:
1247
1248 case CC_OP_DECB:
1249 case CC_OP_DECW:
1250 case CC_OP_DECL:
1251 case CC_OP_DECQ:
1252
1253 case CC_OP_SHLB:
1254 case CC_OP_SHLW:
1255 case CC_OP_SHLL:
1256 case CC_OP_SHLQ:
1257 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1258 goto slow_jcc;
1259 break;
1260 default:
1261 slow_jcc:
1262 return 0;
1263 }
1264 return 1;
1265}
1266
1267/* generate a conditional jump to label 'l1' according to jump opcode
1268 value 'b'. In the fast case, T0 is guaranteed not to be used. */
1269#ifndef VBOX
1270static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1271#else /* VBOX */
1272DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1273#endif /* VBOX */
1274{
1275 int inv, jcc_op, size, cond;
1276 TCGv t0;
1277
1278 inv = b & 1;
1279 jcc_op = (b >> 1) & 7;
1280
1281 switch(cc_op) {
1282 /* we optimize the cmp/jcc case */
1283 case CC_OP_SUBB:
1284 case CC_OP_SUBW:
1285 case CC_OP_SUBL:
1286 case CC_OP_SUBQ:
1287
1288 size = cc_op - CC_OP_SUBB;
1289 switch(jcc_op) {
1290 case JCC_Z:
1291 fast_jcc_z:
1292 switch(size) {
1293 case 0:
1294 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
1295 t0 = cpu_tmp0;
1296 break;
1297 case 1:
1298 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
1299 t0 = cpu_tmp0;
1300 break;
1301#ifdef TARGET_X86_64
1302 case 2:
1303 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1304 t0 = cpu_tmp0;
1305 break;
1306#endif
1307 default:
1308 t0 = cpu_cc_dst;
1309 break;
1310 }
1311 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
1312 break;
1313 case JCC_S:
1314 fast_jcc_s:
1315 switch(size) {
1316 case 0:
1317 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1318 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1319 0, l1);
1320 break;
1321 case 1:
1322 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1323 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1324 0, l1);
1325 break;
1326#ifdef TARGET_X86_64
1327 case 2:
1328 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1329 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1330 0, l1);
1331 break;
1332#endif
1333 default:
1334 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1335 0, l1);
1336 break;
1337 }
1338 break;
1339
1340 case JCC_B:
1341 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1342 goto fast_jcc_b;
1343 case JCC_BE:
1344 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1345 fast_jcc_b:
1346 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1347 switch(size) {
1348 case 0:
1349 t0 = cpu_tmp0;
1350 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1351 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1352 break;
1353 case 1:
1354 t0 = cpu_tmp0;
1355 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1356 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1357 break;
1358#ifdef TARGET_X86_64
1359 case 2:
1360 t0 = cpu_tmp0;
1361 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1362 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1363 break;
1364#endif
1365 default:
1366 t0 = cpu_cc_src;
1367 break;
1368 }
1369 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1370 break;
1371
1372 case JCC_L:
1373 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1374 goto fast_jcc_l;
1375 case JCC_LE:
1376 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1377 fast_jcc_l:
1378 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1379 switch(size) {
1380 case 0:
1381 t0 = cpu_tmp0;
1382 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1383 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1384 break;
1385 case 1:
1386 t0 = cpu_tmp0;
1387 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1388 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1389 break;
1390#ifdef TARGET_X86_64
1391 case 2:
1392 t0 = cpu_tmp0;
1393 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1394 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1395 break;
1396#endif
1397 default:
1398 t0 = cpu_cc_src;
1399 break;
1400 }
1401 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1402 break;
1403
1404 default:
1405 goto slow_jcc;
1406 }
1407 break;
1408
1409 /* some jumps are easy to compute */
1410 case CC_OP_ADDB:
1411 case CC_OP_ADDW:
1412 case CC_OP_ADDL:
1413 case CC_OP_ADDQ:
1414
1415 case CC_OP_ADCB:
1416 case CC_OP_ADCW:
1417 case CC_OP_ADCL:
1418 case CC_OP_ADCQ:
1419
1420 case CC_OP_SBBB:
1421 case CC_OP_SBBW:
1422 case CC_OP_SBBL:
1423 case CC_OP_SBBQ:
1424
1425 case CC_OP_LOGICB:
1426 case CC_OP_LOGICW:
1427 case CC_OP_LOGICL:
1428 case CC_OP_LOGICQ:
1429
1430 case CC_OP_INCB:
1431 case CC_OP_INCW:
1432 case CC_OP_INCL:
1433 case CC_OP_INCQ:
1434
1435 case CC_OP_DECB:
1436 case CC_OP_DECW:
1437 case CC_OP_DECL:
1438 case CC_OP_DECQ:
1439
1440 case CC_OP_SHLB:
1441 case CC_OP_SHLW:
1442 case CC_OP_SHLL:
1443 case CC_OP_SHLQ:
1444
1445 case CC_OP_SARB:
1446 case CC_OP_SARW:
1447 case CC_OP_SARL:
1448 case CC_OP_SARQ:
1449 switch(jcc_op) {
1450 case JCC_Z:
1451 size = (cc_op - CC_OP_ADDB) & 3;
1452 goto fast_jcc_z;
1453 case JCC_S:
1454 size = (cc_op - CC_OP_ADDB) & 3;
1455 goto fast_jcc_s;
1456 default:
1457 goto slow_jcc;
1458 }
1459 break;
1460 default:
1461 slow_jcc:
1462 gen_setcc_slow_T0(s, jcc_op);
1463 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1464 cpu_T[0], 0, l1);
1465 break;
1466 }
1467}
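/* The fast paths above exploit the lazy-flags convention for CC_OP_SUB*:
   after a CMP, cc_dst holds a - b and cc_src holds b, so the original
   first operand is recovered as cc_dst + cc_src (cpu_tmp4). A 'jb' then
   reduces to an unsigned compare of the reconstructed a against b, with
   no need to materialize the full EFLAGS. */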
1468
1469/* XXX: does not work with gdbstub "ice" single step - not a
1470 serious problem */
1471static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1472{
1473 int l1, l2;
1474
1475 l1 = gen_new_label();
1476 l2 = gen_new_label();
1477 gen_op_jnz_ecx(s->aflag, l1);
1478 gen_set_label(l2);
1479 gen_jmp_tb(s, next_eip, 1);
1480 gen_set_label(l1);
1481 return l2;
1482}
1483
1484#ifndef VBOX
1485static inline void gen_stos(DisasContext *s, int ot)
1486#else /* VBOX */
1487DECLINLINE(void) gen_stos(DisasContext *s, int ot)
1488#endif /* VBOX */
1489{
1490 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1491 gen_string_movl_A0_EDI(s);
1492 gen_op_st_T0_A0(ot + s->mem_index);
1493 gen_op_movl_T0_Dshift(ot);
1494 gen_op_add_reg_T0(s->aflag, R_EDI);
1495}
1496
1497#ifndef VBOX
1498static inline void gen_lods(DisasContext *s, int ot)
1499#else /* VBOX */
1500DECLINLINE(void) gen_lods(DisasContext *s, int ot)
1501#endif /* VBOX */
1502{
1503 gen_string_movl_A0_ESI(s);
1504 gen_op_ld_T0_A0(ot + s->mem_index);
1505 gen_op_mov_reg_T0(ot, R_EAX);
1506 gen_op_movl_T0_Dshift(ot);
1507 gen_op_add_reg_T0(s->aflag, R_ESI);
1508}
1509
1510#ifndef VBOX
1511static inline void gen_scas(DisasContext *s, int ot)
1512#else /* VBOX */
1513DECLINLINE(void) gen_scas(DisasContext *s, int ot)
1514#endif /* VBOX */
1515{
1516 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1517 gen_string_movl_A0_EDI(s);
1518 gen_op_ld_T1_A0(ot + s->mem_index);
1519 gen_op_cmpl_T0_T1_cc();
1520 gen_op_movl_T0_Dshift(ot);
1521 gen_op_add_reg_T0(s->aflag, R_EDI);
1522}
1523
1524#ifndef VBOX
1525static inline void gen_cmps(DisasContext *s, int ot)
1526#else /* VBOX */
1527DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
1528#endif /* VBOX */
1529{
1530 gen_string_movl_A0_ESI(s);
1531 gen_op_ld_T0_A0(ot + s->mem_index);
1532 gen_string_movl_A0_EDI(s);
1533 gen_op_ld_T1_A0(ot + s->mem_index);
1534 gen_op_cmpl_T0_T1_cc();
1535 gen_op_movl_T0_Dshift(ot);
1536 gen_op_add_reg_T0(s->aflag, R_ESI);
1537 gen_op_add_reg_T0(s->aflag, R_EDI);
1538}
1539
1540#ifndef VBOX
1541static inline void gen_ins(DisasContext *s, int ot)
1542#else /* VBOX */
1543DECLINLINE(void) gen_ins(DisasContext *s, int ot)
1544#endif /* VBOX */
1545{
1546 if (use_icount)
1547 gen_io_start();
1548 gen_string_movl_A0_EDI(s);
1549 /* Note: we must do this dummy write first to be restartable in
1550 case of page fault. */
1551 gen_op_movl_T0_0();
1552 gen_op_st_T0_A0(ot + s->mem_index);
1553 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1554 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1555 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1556 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1557 gen_op_st_T0_A0(ot + s->mem_index);
1558 gen_op_movl_T0_Dshift(ot);
1559 gen_op_add_reg_T0(s->aflag, R_EDI);
1560 if (use_icount)
1561 gen_io_end();
1562}
1563
1564#ifndef VBOX
1565static inline void gen_outs(DisasContext *s, int ot)
1566#else /* VBOX */
1567DECLINLINE(void) gen_outs(DisasContext *s, int ot)
1568#endif /* VBOX */
1569{
1570 if (use_icount)
1571 gen_io_start();
1572 gen_string_movl_A0_ESI(s);
1573 gen_op_ld_T0_A0(ot + s->mem_index);
1574
1575 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1576 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1577 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1578 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1579 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1580
1581 gen_op_movl_T0_Dshift(ot);
1582 gen_op_add_reg_T0(s->aflag, R_ESI);
1583 if (use_icount)
1584 gen_io_end();
1585}
1586
1587/* same method as Valgrind: we generate jumps to current or next
1588 instruction */
1589#ifndef VBOX
1590#define GEN_REPZ(op) \
1591static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1592 target_ulong cur_eip, target_ulong next_eip) \
1593{ \
1594 int l2; \
1595 gen_update_cc_op(s); \
1596 l2 = gen_jz_ecx_string(s, next_eip); \
1597 gen_ ## op(s, ot); \
1598 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1599 /* a loop would cause two single step exceptions if ECX = 1 \
1600 before rep string_insn */ \
1601 if (!s->jmp_opt) \
1602 gen_op_jz_ecx(s->aflag, l2); \
1603 gen_jmp(s, cur_eip); \
1604}
1605#else /* VBOX */
1606#define GEN_REPZ(op) \
1607DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1608 target_ulong cur_eip, target_ulong next_eip) \
1609{ \
1610 int l2; \
1611 gen_update_cc_op(s); \
1612 l2 = gen_jz_ecx_string(s, next_eip); \
1613 gen_ ## op(s, ot); \
1614 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1615 /* a loop would cause two single step exceptions if ECX = 1 \
1616 before rep string_insn */ \
1617 if (!s->jmp_opt) \
1618 gen_op_jz_ecx(s->aflag, l2); \
1619 gen_jmp(s, cur_eip); \
1620}
1621#endif /* VBOX */
1622
1623#ifndef VBOX
1624#define GEN_REPZ2(op) \
1625static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1626 target_ulong cur_eip, \
1627 target_ulong next_eip, \
1628 int nz) \
1629{ \
1630 int l2; \
1631 gen_update_cc_op(s); \
1632 l2 = gen_jz_ecx_string(s, next_eip); \
1633 gen_ ## op(s, ot); \
1634 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1635 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1636 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1637 if (!s->jmp_opt) \
1638 gen_op_jz_ecx(s->aflag, l2); \
1639 gen_jmp(s, cur_eip); \
1640}
1641#else /* VBOX */
1642#define GEN_REPZ2(op) \
1643DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1644 target_ulong cur_eip, \
1645 target_ulong next_eip, \
1646 int nz) \
1647{ \
1648 int l2;\
1649 gen_update_cc_op(s); \
1650 l2 = gen_jz_ecx_string(s, next_eip); \
1651 gen_ ## op(s, ot); \
1652 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1653 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1654 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1655 if (!s->jmp_opt) \
1656 gen_op_jz_ecx(s->aflag, l2); \
1657 gen_jmp(s, cur_eip); \
1658}
1659#endif /* VBOX */
1660
1661GEN_REPZ(movs)
1662GEN_REPZ(stos)
1663GEN_REPZ(lods)
1664GEN_REPZ(ins)
1665GEN_REPZ(outs)
1666GEN_REPZ2(scas)
1667GEN_REPZ2(cmps)
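/* A REP prefix is expanded as a translated loop of single iterations:
   each pass decrements ECX and jumps back to cur_eip, so the next
   iteration re-enters through the translator (the "Valgrind method"
   noted above). GEN_REPZ2 additionally sets cc_op and tests ZF after
   the iteration so REPE/REPNE SCAS and CMPS can terminate early. */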
1668
1669static void *helper_fp_arith_ST0_FT0[8] = {
1670 helper_fadd_ST0_FT0,
1671 helper_fmul_ST0_FT0,
1672 helper_fcom_ST0_FT0,
1673 helper_fcom_ST0_FT0,
1674 helper_fsub_ST0_FT0,
1675 helper_fsubr_ST0_FT0,
1676 helper_fdiv_ST0_FT0,
1677 helper_fdivr_ST0_FT0,
1678};
1679
1680/* NOTE the exception in "r" op ordering */
1681static void *helper_fp_arith_STN_ST0[8] = {
1682 helper_fadd_STN_ST0,
1683 helper_fmul_STN_ST0,
1684 NULL,
1685 NULL,
1686 helper_fsubr_STN_ST0,
1687 helper_fsub_STN_ST0,
1688 helper_fdivr_STN_ST0,
1689 helper_fdiv_STN_ST0,
1690};
1691
1692/* if d == OR_TMP0, it means memory operand (address in A0) */
1693static void gen_op(DisasContext *s1, int op, int ot, int d)
1694{
1695 if (d != OR_TMP0) {
1696 gen_op_mov_TN_reg(ot, 0, d);
1697 } else {
1698 gen_op_ld_T0_A0(ot + s1->mem_index);
1699 }
1700 switch(op) {
1701 case OP_ADCL:
1702 if (s1->cc_op != CC_OP_DYNAMIC)
1703 gen_op_set_cc_op(s1->cc_op);
1704 gen_compute_eflags_c(cpu_tmp4);
1705 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1706 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1707 if (d != OR_TMP0)
1708 gen_op_mov_reg_T0(ot, d);
1709 else
1710 gen_op_st_T0_A0(ot + s1->mem_index);
1711 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1712 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1713 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1714 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1715 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1716 s1->cc_op = CC_OP_DYNAMIC;
1717 break;
1718 case OP_SBBL:
1719 if (s1->cc_op != CC_OP_DYNAMIC)
1720 gen_op_set_cc_op(s1->cc_op);
1721 gen_compute_eflags_c(cpu_tmp4);
1722 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1723 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1724 if (d != OR_TMP0)
1725 gen_op_mov_reg_T0(ot, d);
1726 else
1727 gen_op_st_T0_A0(ot + s1->mem_index);
1728 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1729 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1730 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1731 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1732 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1733 s1->cc_op = CC_OP_DYNAMIC;
1734 break;
1735 case OP_ADDL:
1736 gen_op_addl_T0_T1();
1737 if (d != OR_TMP0)
1738 gen_op_mov_reg_T0(ot, d);
1739 else
1740 gen_op_st_T0_A0(ot + s1->mem_index);
1741 gen_op_update2_cc();
1742 s1->cc_op = CC_OP_ADDB + ot;
1743 break;
1744 case OP_SUBL:
1745 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1746 if (d != OR_TMP0)
1747 gen_op_mov_reg_T0(ot, d);
1748 else
1749 gen_op_st_T0_A0(ot + s1->mem_index);
1750 gen_op_update2_cc();
1751 s1->cc_op = CC_OP_SUBB + ot;
1752 break;
1753 default:
1754 case OP_ANDL:
1755 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1756 if (d != OR_TMP0)
1757 gen_op_mov_reg_T0(ot, d);
1758 else
1759 gen_op_st_T0_A0(ot + s1->mem_index);
1760 gen_op_update1_cc();
1761 s1->cc_op = CC_OP_LOGICB + ot;
1762 break;
1763 case OP_ORL:
1764 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1765 if (d != OR_TMP0)
1766 gen_op_mov_reg_T0(ot, d);
1767 else
1768 gen_op_st_T0_A0(ot + s1->mem_index);
1769 gen_op_update1_cc();
1770 s1->cc_op = CC_OP_LOGICB + ot;
1771 break;
1772 case OP_XORL:
1773 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1774 if (d != OR_TMP0)
1775 gen_op_mov_reg_T0(ot, d);
1776 else
1777 gen_op_st_T0_A0(ot + s1->mem_index);
1778 gen_op_update1_cc();
1779 s1->cc_op = CC_OP_LOGICB + ot;
1780 break;
1781 case OP_CMPL:
1782 gen_op_cmpl_T0_T1_cc();
1783 s1->cc_op = CC_OP_SUBB + ot;
1784 break;
1785 }
1786}
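/* For ADC/SBB the final cc_op depends on the run-time carry-in, so it is
   computed dynamically above. This relies on the CC_OP_* enum placing
   the ADC group four entries after the ADD group (and SBB after SUB):
   carry-in (0 or 1) shifted left by 2 is added to CC_OP_ADDB + ot
   (resp. CC_OP_SUBB + ot) to select the matching variant. */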
1787
1788/* if d == OR_TMP0, it means memory operand (address in A0) */
1789static void gen_inc(DisasContext *s1, int ot, int d, int c)
1790{
1791 if (d != OR_TMP0)
1792 gen_op_mov_TN_reg(ot, 0, d);
1793 else
1794 gen_op_ld_T0_A0(ot + s1->mem_index);
1795 if (s1->cc_op != CC_OP_DYNAMIC)
1796 gen_op_set_cc_op(s1->cc_op);
1797 if (c > 0) {
1798 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1799 s1->cc_op = CC_OP_INCB + ot;
1800 } else {
1801 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1802 s1->cc_op = CC_OP_DECB + ot;
1803 }
1804 if (d != OR_TMP0)
1805 gen_op_mov_reg_T0(ot, d);
1806 else
1807 gen_op_st_T0_A0(ot + s1->mem_index);
1808 gen_compute_eflags_c(cpu_cc_src);
1809 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1810}
1811
1812static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1813 int is_right, int is_arith)
1814{
1815 target_ulong mask;
1816 int shift_label;
1817 TCGv t0, t1;
1818
1819 if (ot == OT_QUAD)
1820 mask = 0x3f;
1821 else
1822 mask = 0x1f;
1823
1824 /* load */
1825 if (op1 == OR_TMP0)
1826 gen_op_ld_T0_A0(ot + s->mem_index);
1827 else
1828 gen_op_mov_TN_reg(ot, 0, op1);
1829
1830 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1831
1832 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1833
1834 if (is_right) {
1835 if (is_arith) {
1836 gen_exts(ot, cpu_T[0]);
1837 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1838 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1839 } else {
1840 gen_extu(ot, cpu_T[0]);
1841 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1842 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1843 }
1844 } else {
1845 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1846 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1847 }
1848
1849 /* store */
1850 if (op1 == OR_TMP0)
1851 gen_op_st_T0_A0(ot + s->mem_index);
1852 else
1853 gen_op_mov_reg_T0(ot, op1);
1854
1855 /* update eflags if non zero shift */
1856 if (s->cc_op != CC_OP_DYNAMIC)
1857 gen_op_set_cc_op(s->cc_op);
1858
1859 /* XXX: inefficient */
1860 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1861 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1862
1863 tcg_gen_mov_tl(t0, cpu_T[0]);
1864 tcg_gen_mov_tl(t1, cpu_T3);
1865
1866 shift_label = gen_new_label();
1867 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1868
1869 tcg_gen_mov_tl(cpu_cc_src, t1);
1870 tcg_gen_mov_tl(cpu_cc_dst, t0);
1871 if (is_right)
1872 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1873 else
1874 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1875
1876 gen_set_label(shift_label);
1877 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1878
1879 tcg_temp_free(t0);
1880 tcg_temp_free(t1);
1881}
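/* x86 leaves the flags untouched when the masked shift count is zero,
   and here the count is only known at run time, so the flags update is
   guarded by a generated branch: cpu_T3 (the value shifted by count - 1,
   i.e. the CF source) reaches cc_src only when the count is non-zero. */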
1882
1883static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1884 int is_right, int is_arith)
1885{
1886 int mask;
1887
1888 if (ot == OT_QUAD)
1889 mask = 0x3f;
1890 else
1891 mask = 0x1f;
1892
1893 /* load */
1894 if (op1 == OR_TMP0)
1895 gen_op_ld_T0_A0(ot + s->mem_index);
1896 else
1897 gen_op_mov_TN_reg(ot, 0, op1);
1898
1899 op2 &= mask;
1900 if (op2 != 0) {
1901 if (is_right) {
1902 if (is_arith) {
1903 gen_exts(ot, cpu_T[0]);
1904 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1905 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1906 } else {
1907 gen_extu(ot, cpu_T[0]);
1908 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1909 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1910 }
1911 } else {
1912 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1913 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1914 }
1915 }
1916
1917 /* store */
1918 if (op1 == OR_TMP0)
1919 gen_op_st_T0_A0(ot + s->mem_index);
1920 else
1921 gen_op_mov_reg_T0(ot, op1);
1922
1923 /* update eflags if non zero shift */
1924 if (op2 != 0) {
1925 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
1926 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1927 if (is_right)
1928 s->cc_op = CC_OP_SARB + ot;
1929 else
1930 s->cc_op = CC_OP_SHLB + ot;
1931 }
1932}
1933
1934#ifndef VBOX
1935static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1936#else /* VBOX */
1937DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1938#endif /* VBOX */
1939{
1940 if (arg2 >= 0)
1941 tcg_gen_shli_tl(ret, arg1, arg2);
1942 else
1943 tcg_gen_shri_tl(ret, arg1, -arg2);
1944}
1945
1946/* XXX: add faster immediate case */
1947static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1948 int is_right)
1949{
1950 target_ulong mask;
1951 int label1, label2, data_bits;
1952 TCGv t0, t1, t2, a0;
1953
1954 /* XXX: inefficient, but we must use local temps */
1955 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1956 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1957 t2 = tcg_temp_local_new(TCG_TYPE_TL);
1958 a0 = tcg_temp_local_new(TCG_TYPE_TL);
1959
1960 if (ot == OT_QUAD)
1961 mask = 0x3f;
1962 else
1963 mask = 0x1f;
1964
1965 /* load */
1966 if (op1 == OR_TMP0) {
1967 tcg_gen_mov_tl(a0, cpu_A0);
1968 gen_op_ld_v(ot + s->mem_index, t0, a0);
1969 } else {
1970 gen_op_mov_v_reg(ot, t0, op1);
1971 }
1972
1973 tcg_gen_mov_tl(t1, cpu_T[1]);
1974
1975 tcg_gen_andi_tl(t1, t1, mask);
1976
1977 /* Must test zero case to avoid using undefined behaviour in TCG
1978 shifts. */
1979 label1 = gen_new_label();
1980 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
1981
1982 if (ot <= OT_WORD)
1983 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
1984 else
1985 tcg_gen_mov_tl(cpu_tmp0, t1);
1986
1987 gen_extu(ot, t0);
1988 tcg_gen_mov_tl(t2, t0);
1989
1990 data_bits = 8 << ot;
1991 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1992 fix TCG definition) */
1993 if (is_right) {
1994 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
1995 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1996 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
1997 } else {
1998 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
1999 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2000 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2001 }
2002 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2003
2004 gen_set_label(label1);
2005 /* store */
2006 if (op1 == OR_TMP0) {
2007 gen_op_st_v(ot + s->mem_index, t0, a0);
2008 } else {
2009 gen_op_mov_reg_v(ot, op1, t0);
2010 }
2011
2012 /* update eflags */
2013 if (s->cc_op != CC_OP_DYNAMIC)
2014 gen_op_set_cc_op(s->cc_op);
2015
2016 label2 = gen_new_label();
2017 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2018
2019 gen_compute_eflags(cpu_cc_src);
2020 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2021 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2022 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2023 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2024 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2025 if (is_right) {
2026 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2027 }
2028 tcg_gen_andi_tl(t0, t0, CC_C);
2029 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2030
2031 tcg_gen_discard_tl(cpu_cc_dst);
2032 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2033
2034 gen_set_label(label2);
2035 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2036
2037 tcg_temp_free(t0);
2038 tcg_temp_free(t1);
2039 tcg_temp_free(t2);
2040 tcg_temp_free(a0);
2041}
2042
2043static void *helper_rotc[8] = {
2044 helper_rclb,
2045 helper_rclw,
2046 helper_rcll,
2047 X86_64_ONLY(helper_rclq),
2048 helper_rcrb,
2049 helper_rcrw,
2050 helper_rcrl,
2051 X86_64_ONLY(helper_rcrq),
2052};
2053
2054/* XXX: add faster immediate = 1 case */
2055static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2056 int is_right)
2057{
2058 int label1;
2059
2060 if (s->cc_op != CC_OP_DYNAMIC)
2061 gen_op_set_cc_op(s->cc_op);
2062
2063 /* load */
2064 if (op1 == OR_TMP0)
2065 gen_op_ld_T0_A0(ot + s->mem_index);
2066 else
2067 gen_op_mov_TN_reg(ot, 0, op1);
2068
2069 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2070 cpu_T[0], cpu_T[0], cpu_T[1]);
2071 /* store */
2072 if (op1 == OR_TMP0)
2073 gen_op_st_T0_A0(ot + s->mem_index);
2074 else
2075 gen_op_mov_reg_T0(ot, op1);
2076
2077 /* update eflags */
2078 label1 = gen_new_label();
2079 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2080
2081 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2082 tcg_gen_discard_tl(cpu_cc_dst);
2083 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2084
2085 gen_set_label(label1);
2086 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2087}
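/* RCL/RCR rotate through CF over 9/17/33/65 bits, which is awkward to
   express directly in TCG, so helpers do the work. They appear to use
   cpu_cc_tmp as an out-of-band channel: -1 means "count was zero, leave
   the flags alone", any other value is the new EFLAGS image copied into
   cc_src under CC_OP_EFLAGS. */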
2088
2089/* XXX: add faster immediate case */
2090static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2091 int is_right)
2092{
2093 int label1, label2, data_bits;
2094 target_ulong mask;
2095 TCGv t0, t1, t2, a0;
2096
2097 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2098 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2099 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2100 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2101
2102 if (ot == OT_QUAD)
2103 mask = 0x3f;
2104 else
2105 mask = 0x1f;
2106
2107 /* load */
2108 if (op1 == OR_TMP0) {
2109 tcg_gen_mov_tl(a0, cpu_A0);
2110 gen_op_ld_v(ot + s->mem_index, t0, a0);
2111 } else {
2112 gen_op_mov_v_reg(ot, t0, op1);
2113 }
2114
2115 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2116
2117 tcg_gen_mov_tl(t1, cpu_T[1]);
2118 tcg_gen_mov_tl(t2, cpu_T3);
2119
2120 /* Must test zero case to avoid using undefined behaviour in TCG
2121 shifts. */
2122 label1 = gen_new_label();
2123 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2124
2125 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2126 if (ot == OT_WORD) {
2127 /* Note: we implement the Intel behaviour for shift count > 16 */
2128 if (is_right) {
2129 tcg_gen_andi_tl(t0, t0, 0xffff);
2130 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2131 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2132 tcg_gen_ext32u_tl(t0, t0);
2133
2134 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2135
2136 /* only needed if count > 16, but a test would complicate the code */
2137 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2138 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2139
2140 tcg_gen_shr_tl(t0, t0, t2);
2141
2142 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2143 } else {
2144 /* XXX: not optimal */
2145 tcg_gen_andi_tl(t0, t0, 0xffff);
2146 tcg_gen_shli_tl(t1, t1, 16);
2147 tcg_gen_or_tl(t1, t1, t0);
2148 tcg_gen_ext32u_tl(t1, t1);
2149
2150 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2151 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2152 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2153 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2154
2155 tcg_gen_shl_tl(t0, t0, t2);
2156 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2157 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2158 tcg_gen_or_tl(t0, t0, t1);
2159 }
2160 } else {
2161 data_bits = 8 << ot;
2162 if (is_right) {
2163 if (ot == OT_LONG)
2164 tcg_gen_ext32u_tl(t0, t0);
2165
2166 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2167
2168 tcg_gen_shr_tl(t0, t0, t2);
2169 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2170 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2171 tcg_gen_or_tl(t0, t0, t1);
2172
2173 } else {
2174 if (ot == OT_LONG)
2175 tcg_gen_ext32u_tl(t1, t1);
2176
2177 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2178
2179 tcg_gen_shl_tl(t0, t0, t2);
2180 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2181 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2182 tcg_gen_or_tl(t0, t0, t1);
2183 }
2184 }
2185 tcg_gen_mov_tl(t1, cpu_tmp4);
2186
2187 gen_set_label(label1);
2188 /* store */
2189 if (op1 == OR_TMP0) {
2190 gen_op_st_v(ot + s->mem_index, t0, a0);
2191 } else {
2192 gen_op_mov_reg_v(ot, op1, t0);
2193 }
2194
2195 /* update eflags */
2196 if (s->cc_op != CC_OP_DYNAMIC)
2197 gen_op_set_cc_op(s->cc_op);
2198
2199 label2 = gen_new_label();
2200 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2201
2202 tcg_gen_mov_tl(cpu_cc_src, t1);
2203 tcg_gen_mov_tl(cpu_cc_dst, t0);
2204 if (is_right) {
2205 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2206 } else {
2207 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2208 }
2209 gen_set_label(label2);
2210 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2211
2212 tcg_temp_free(t0);
2213 tcg_temp_free(t1);
2214 tcg_temp_free(t2);
2215 tcg_temp_free(a0);
2216}
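/* Illustrative reading of the 16-bit case above (not in the original
 source): for SHRD with a count c in 17..31 the code forms the 32-bit pair
 (t1 << 16) | t0 and effectively rotates it right by c, keeping the low 16
 bits -- the "Intel behaviour" referred to in the comment. A formally
 undefined count such as 20 therefore pulls bits of t1 back into the
 result instead of producing zero. */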
2217
2218static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2219{
2220 if (s != OR_TMP1)
2221 gen_op_mov_TN_reg(ot, 1, s);
2222 switch(op) {
2223 case OP_ROL:
2224 gen_rot_rm_T1(s1, ot, d, 0);
2225 break;
2226 case OP_ROR:
2227 gen_rot_rm_T1(s1, ot, d, 1);
2228 break;
2229 case OP_SHL:
2230 case OP_SHL1:
2231 gen_shift_rm_T1(s1, ot, d, 0, 0);
2232 break;
2233 case OP_SHR:
2234 gen_shift_rm_T1(s1, ot, d, 1, 0);
2235 break;
2236 case OP_SAR:
2237 gen_shift_rm_T1(s1, ot, d, 1, 1);
2238 break;
2239 case OP_RCL:
2240 gen_rotc_rm_T1(s1, ot, d, 0);
2241 break;
2242 case OP_RCR:
2243 gen_rotc_rm_T1(s1, ot, d, 1);
2244 break;
2245 }
2246}
2247
2248static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2249{
2250 switch(op) {
2251 case OP_SHL:
2252 case OP_SHL1:
2253 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2254 break;
2255 case OP_SHR:
2256 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2257 break;
2258 case OP_SAR:
2259 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2260 break;
2261 default:
2262 /* currently not optimized */
2263 gen_op_movl_T1_im(c);
2264 gen_shift(s1, op, ot, d, OR_TMP1);
2265 break;
2266 }
2267}
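/* Usage sketch (hypothetical call site, for illustration only): the shift
 group opcode "C1 /4 ib" (shl r/m32, imm8) would be dispatched roughly as
 gen_shifti(s, OP_SHL, OT_LONG, rm, imm8); immediate SHL/SHR/SAR take the
 fast gen_shift_rm_im() path, while ROL/ROR/RCL/RCR fall back to
 gen_shift() through T1. */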
2268
2269static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2270{
2271 target_long disp;
2272 int havesib;
2273 int base;
2274 int index;
2275 int scale;
2276 int opreg;
2277 int mod, rm, code, override, must_add_seg;
2278
2279 override = s->override;
2280 must_add_seg = s->addseg;
2281 if (override >= 0)
2282 must_add_seg = 1;
2283 mod = (modrm >> 6) & 3;
2284 rm = modrm & 7;
2285
2286 if (s->aflag) {
2287
2288 havesib = 0;
2289 base = rm;
2290 index = 0;
2291 scale = 0;
2292
2293 if (base == 4) {
2294 havesib = 1;
2295 code = ldub_code(s->pc++);
2296 scale = (code >> 6) & 3;
2297 index = ((code >> 3) & 7) | REX_X(s);
2298 base = (code & 7);
2299 }
2300 base |= REX_B(s);
2301
2302 switch (mod) {
2303 case 0:
2304 if ((base & 7) == 5) {
2305 base = -1;
2306 disp = (int32_t)ldl_code(s->pc);
2307 s->pc += 4;
2308 if (CODE64(s) && !havesib) {
2309 disp += s->pc + s->rip_offset;
2310 }
2311 } else {
2312 disp = 0;
2313 }
2314 break;
2315 case 1:
2316 disp = (int8_t)ldub_code(s->pc++);
2317 break;
2318 default:
2319 case 2:
2320 disp = ldl_code(s->pc);
2321 s->pc += 4;
2322 break;
2323 }
2324
2325 if (base >= 0) {
2326 /* for correct popl handling with esp */
2327 if (base == 4 && s->popl_esp_hack)
2328 disp += s->popl_esp_hack;
2329#ifdef TARGET_X86_64
2330 if (s->aflag == 2) {
2331 gen_op_movq_A0_reg(base);
2332 if (disp != 0) {
2333 gen_op_addq_A0_im(disp);
2334 }
2335 } else
2336#endif
2337 {
2338 gen_op_movl_A0_reg(base);
2339 if (disp != 0)
2340 gen_op_addl_A0_im(disp);
2341 }
2342 } else {
2343#ifdef TARGET_X86_64
2344 if (s->aflag == 2) {
2345 gen_op_movq_A0_im(disp);
2346 } else
2347#endif
2348 {
2349 gen_op_movl_A0_im(disp);
2350 }
2351 }
2352 /* XXX: index == 4 is always invalid */
2353 if (havesib && (index != 4 || scale != 0)) {
2354#ifdef TARGET_X86_64
2355 if (s->aflag == 2) {
2356 gen_op_addq_A0_reg_sN(scale, index);
2357 } else
2358#endif
2359 {
2360 gen_op_addl_A0_reg_sN(scale, index);
2361 }
2362 }
2363 if (must_add_seg) {
2364 if (override < 0) {
2365 if (base == R_EBP || base == R_ESP)
2366 override = R_SS;
2367 else
2368 override = R_DS;
2369 }
2370#ifdef TARGET_X86_64
2371 if (s->aflag == 2) {
2372 gen_op_addq_A0_seg(override);
2373 } else
2374#endif
2375 {
2376 gen_op_addl_A0_seg(override);
2377 }
2378 }
2379 } else {
2380 switch (mod) {
2381 case 0:
2382 if (rm == 6) {
2383 disp = lduw_code(s->pc);
2384 s->pc += 2;
2385 gen_op_movl_A0_im(disp);
2386 rm = 0; /* avoid SS override */
2387 goto no_rm;
2388 } else {
2389 disp = 0;
2390 }
2391 break;
2392 case 1:
2393 disp = (int8_t)ldub_code(s->pc++);
2394 break;
2395 default:
2396 case 2:
2397 disp = lduw_code(s->pc);
2398 s->pc += 2;
2399 break;
2400 }
2401 switch(rm) {
2402 case 0:
2403 gen_op_movl_A0_reg(R_EBX);
2404 gen_op_addl_A0_reg_sN(0, R_ESI);
2405 break;
2406 case 1:
2407 gen_op_movl_A0_reg(R_EBX);
2408 gen_op_addl_A0_reg_sN(0, R_EDI);
2409 break;
2410 case 2:
2411 gen_op_movl_A0_reg(R_EBP);
2412 gen_op_addl_A0_reg_sN(0, R_ESI);
2413 break;
2414 case 3:
2415 gen_op_movl_A0_reg(R_EBP);
2416 gen_op_addl_A0_reg_sN(0, R_EDI);
2417 break;
2418 case 4:
2419 gen_op_movl_A0_reg(R_ESI);
2420 break;
2421 case 5:
2422 gen_op_movl_A0_reg(R_EDI);
2423 break;
2424 case 6:
2425 gen_op_movl_A0_reg(R_EBP);
2426 break;
2427 default:
2428 case 7:
2429 gen_op_movl_A0_reg(R_EBX);
2430 break;
2431 }
2432 if (disp != 0)
2433 gen_op_addl_A0_im(disp);
2434 gen_op_andl_A0_ffff();
2435 no_rm:
2436 if (must_add_seg) {
2437 if (override < 0) {
2438 if (rm == 2 || rm == 3 || rm == 6)
2439 override = R_SS;
2440 else
2441 override = R_DS;
2442 }
2443 gen_op_addl_A0_seg(override);
2444 }
2445 }
2446
2447 opreg = OR_A0;
2448 disp = 0;
2449 *reg_ptr = opreg;
2450 *offset_ptr = disp;
2451}
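/* Worked decoding example (illustrative): for the 32-bit instruction
 "mov eax, [esi + ebx*4 + 8]", encoded 8B 44 9E 08:
 modrm = 0x44 -> mod=01, reg=000 (EAX), rm=100 (SIB byte follows)
 sib = 0x9E -> scale=10 (*4), index=011 (EBX), base=110 (ESI)
 disp8 = 0x08
 gen_lea_modrm() then leaves A0 = ESI + EBX*4 + 8, plus the DS base when
 segment addition is required. */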
2452
2453static void gen_nop_modrm(DisasContext *s, int modrm)
2454{
2455 int mod, rm, base, code;
2456
2457 mod = (modrm >> 6) & 3;
2458 if (mod == 3)
2459 return;
2460 rm = modrm & 7;
2461
2462 if (s->aflag) {
2463
2464 base = rm;
2465
2466 if (base == 4) {
2467 code = ldub_code(s->pc++);
2468 base = (code & 7);
2469 }
2470
2471 switch (mod) {
2472 case 0:
2473 if (base == 5) {
2474 s->pc += 4;
2475 }
2476 break;
2477 case 1:
2478 s->pc++;
2479 break;
2480 default:
2481 case 2:
2482 s->pc += 4;
2483 break;
2484 }
2485 } else {
2486 switch (mod) {
2487 case 0:
2488 if (rm == 6) {
2489 s->pc += 2;
2490 }
2491 break;
2492 case 1:
2493 s->pc++;
2494 break;
2495 default:
2496 case 2:
2497 s->pc += 2;
2498 break;
2499 }
2500 }
2501}
2502
2503/* used for LEA and MOV AX, mem */
2504static void gen_add_A0_ds_seg(DisasContext *s)
2505{
2506 int override, must_add_seg;
2507 must_add_seg = s->addseg;
2508 override = R_DS;
2509 if (s->override >= 0) {
2510 override = s->override;
2511 must_add_seg = 1;
2512 }
2515 if (must_add_seg) {
2516#ifdef TARGET_X86_64
2517 if (CODE64(s)) {
2518 gen_op_addq_A0_seg(override);
2519 } else
2520#endif
2521 {
2522 gen_op_addl_A0_seg(override);
2523 }
2524 }
2525}
2526
2527/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2528 OR_TMP0 */
2529static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2530{
2531 int mod, rm, opreg, disp;
2532
2533 mod = (modrm >> 6) & 3;
2534 rm = (modrm & 7) | REX_B(s);
2535 if (mod == 3) {
2536 if (is_store) {
2537 if (reg != OR_TMP0)
2538 gen_op_mov_TN_reg(ot, 0, reg);
2539 gen_op_mov_reg_T0(ot, rm);
2540 } else {
2541 gen_op_mov_TN_reg(ot, 0, rm);
2542 if (reg != OR_TMP0)
2543 gen_op_mov_reg_T0(ot, reg);
2544 }
2545 } else {
2546 gen_lea_modrm(s, modrm, &opreg, &disp);
2547 if (is_store) {
2548 if (reg != OR_TMP0)
2549 gen_op_mov_TN_reg(ot, 0, reg);
2550 gen_op_st_T0_A0(ot + s->mem_index);
2551 } else {
2552 gen_op_ld_T0_A0(ot + s->mem_index);
2553 if (reg != OR_TMP0)
2554 gen_op_mov_reg_T0(ot, reg);
2555 }
2556 }
2557}
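/* Usage note (illustrative): gen_ldst_modrm() is the common path for
 "Ev/Gv"-style operands. With mod == 3 it moves between registers;
 otherwise it computes A0 via gen_lea_modrm() and performs a single memory
 access. E.g. loading the r/m operand into T0 is
 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0), and is_store == 1 writes back
 instead. */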
2558
2559#ifndef VBOX
2560static inline uint32_t insn_get(DisasContext *s, int ot)
2561#else /* VBOX */
2562DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2563#endif /* VBOX */
2564{
2565 uint32_t ret;
2566
2567 switch(ot) {
2568 case OT_BYTE:
2569 ret = ldub_code(s->pc);
2570 s->pc++;
2571 break;
2572 case OT_WORD:
2573 ret = lduw_code(s->pc);
2574 s->pc += 2;
2575 break;
2576 default:
2577 case OT_LONG:
2578 ret = ldl_code(s->pc);
2579 s->pc += 4;
2580 break;
2581 }
2582 return ret;
2583}
2584
2585#ifndef VBOX
2586static inline int insn_const_size(unsigned int ot)
2587#else /* VBOX */
2588DECLINLINE(int) insn_const_size(unsigned int ot)
2589#endif /* VBOX */
2590{
2591 if (ot <= OT_LONG)
2592 return 1 << ot;
2593 else
2594 return 4;
2595}
2596
2597#ifndef VBOX
2598static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2599#else /* VBOX */
2600DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2601#endif /* VBOX */
2602{
2603 TranslationBlock *tb;
2604 target_ulong pc;
2605
2606 pc = s->cs_base + eip;
2607 tb = s->tb;
2608 /* NOTE: we handle the case where the TB spans two pages here */
2609 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2610 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2611 /* jump to same page: we can use a direct jump */
2612 tcg_gen_goto_tb(tb_num);
2613 gen_jmp_im(eip);
2614 tcg_gen_exit_tb((long)tb + tb_num);
2615 } else {
2616 /* jump to another page: currently not optimized */
2617 gen_jmp_im(eip);
2618 gen_eob(s);
2619 }
2620}
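/* Background note (general TCG behaviour, added for clarity): exiting with
 "(long)tb + tb_num" hands the execution loop the current TB pointer with
 the jump slot index in the low bits, so the loop can later patch this
 goto_tb exit to chain directly to the translated successor. Cross-page
 jumps cannot be chained safely, hence the gen_eob() fallback above. */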
2621
2622#ifndef VBOX
2623static inline void gen_jcc(DisasContext *s, int b,
2624#else /* VBOX */
2625DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2626#endif /* VBOX */
2627 target_ulong val, target_ulong next_eip)
2628{
2629 int l1, l2, cc_op;
2630
2631 cc_op = s->cc_op;
2632 if (s->cc_op != CC_OP_DYNAMIC) {
2633 gen_op_set_cc_op(s->cc_op);
2634 s->cc_op = CC_OP_DYNAMIC;
2635 }
2636 if (s->jmp_opt) {
2637#ifdef VBOX
2638 gen_check_external_event(s);
2639#endif /* VBOX */
2640 l1 = gen_new_label();
2641 gen_jcc1(s, cc_op, b, l1);
2642
2643 gen_goto_tb(s, 0, next_eip);
2644
2645 gen_set_label(l1);
2646 gen_goto_tb(s, 1, val);
2647 s->is_jmp = 3;
2648 } else {
2649
2650 l1 = gen_new_label();
2651 l2 = gen_new_label();
2652 gen_jcc1(s, cc_op, b, l1);
2653
2654 gen_jmp_im(next_eip);
2655 tcg_gen_br(l2);
2656
2657 gen_set_label(l1);
2658 gen_jmp_im(val);
2659 gen_set_label(l2);
2660 gen_eob(s);
2661 }
2662}
2663
2664static void gen_setcc(DisasContext *s, int b)
2665{
2666 int inv, jcc_op, l1;
2667 TCGv t0;
2668
2669 if (is_fast_jcc_case(s, b)) {
2670 /* nominal case: we use a jump */
2671 /* XXX: make it faster by adding new instructions in TCG */
2672 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2673 tcg_gen_movi_tl(t0, 0);
2674 l1 = gen_new_label();
2675 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2676 tcg_gen_movi_tl(t0, 1);
2677 gen_set_label(l1);
2678 tcg_gen_mov_tl(cpu_T[0], t0);
2679 tcg_temp_free(t0);
2680 } else {
2681 /* slow case: it is more efficient not to generate a jump,
2682 although it is questionable whether this optimization is
2683 worth it */
2684 inv = b & 1;
2685 jcc_op = (b >> 1) & 7;
2686 gen_setcc_slow_T0(s, jcc_op);
2687 if (inv) {
2688 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2689 }
2690 }
2691}
2692
2693#ifndef VBOX
2694static inline void gen_op_movl_T0_seg(int seg_reg)
2695#else /* VBOX */
2696DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2697#endif /* VBOX */
2698{
2699 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2700 offsetof(CPUX86State,segs[seg_reg].selector));
2701}
2702
2703#ifndef VBOX
2704static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2705#else /* VBOX */
2706DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2707#endif /* VBOX */
2708{
2709 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2710 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2711 offsetof(CPUX86State,segs[seg_reg].selector));
2712 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2713 tcg_gen_st_tl(cpu_T[0], cpu_env,
2714 offsetof(CPUX86State,segs[seg_reg].base));
2715#ifdef VBOX
2716 { /* block scope keeps the declaration valid C89 */
2717 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2718 if (seg_reg == R_CS)
2719 flags |= DESC_CS_MASK;
2720 gen_op_movl_T0_im(flags);
2721 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
 }
#endif
2722}
2723
2724/* move T0 to seg_reg and compute if the CPU state may change. Never
2725 call this function with seg_reg == R_CS */
2726static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2727{
2728 if (s->pe && !s->vm86) {
2729 /* XXX: optimize by finding processor state dynamically */
2730 if (s->cc_op != CC_OP_DYNAMIC)
2731 gen_op_set_cc_op(s->cc_op);
2732 gen_jmp_im(cur_eip);
2733 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2734 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2735 /* abort translation because the addseg value may change or
2736 because ss32 may change. For R_SS, translation must always
2737 stop, since special handling is needed to inhibit hardware
2738 interrupts for the next instruction */
2739 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2740 s->is_jmp = 3;
2741 } else {
2742 gen_op_movl_seg_T0_vm(seg_reg);
2743 if (seg_reg == R_SS)
2744 s->is_jmp = 3;
2745 }
2746}
2747
2748#ifndef VBOX
2749static inline int svm_is_rep(int prefixes)
2750#else /* VBOX */
2751DECLINLINE(int) svm_is_rep(int prefixes)
2752#endif /* VBOX */
2753{
2754 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2755}
2756
2757#ifndef VBOX
2758static inline void
2759#else /* VBOX */
2760DECLINLINE(void)
2761#endif /* VBOX */
2762gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2763 uint32_t type, uint64_t param)
2764{
2765 /* SVM not active: fast case */
2766 if (likely(!(s->flags & HF_SVMI_MASK)))
2767 return;
2768 if (s->cc_op != CC_OP_DYNAMIC)
2769 gen_op_set_cc_op(s->cc_op);
2770 gen_jmp_im(pc_start - s->cs_base);
2771 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2772 tcg_const_i32(type), tcg_const_i64(param));
2773}
2774
2775#ifndef VBOX
2776static inline void
2777#else /* VBOX */
2778DECLINLINE(void)
2779#endif
2780gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2781{
2782 gen_svm_check_intercept_param(s, pc_start, type, 0);
2783}
2784
2785#ifndef VBOX
2786static inline void gen_stack_update(DisasContext *s, int addend)
2787#else /* VBOX */
2788DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2789#endif /* VBOX */
2790{
2791#ifdef TARGET_X86_64
2792 if (CODE64(s)) {
2793 gen_op_add_reg_im(2, R_ESP, addend);
2794 } else
2795#endif
2796 if (s->ss32) {
2797 gen_op_add_reg_im(1, R_ESP, addend);
2798 } else {
2799 gen_op_add_reg_im(0, R_ESP, addend);
2800 }
2801}
2802
2803/* generate a push. It depends on ss32, addseg and dflag */
2804static void gen_push_T0(DisasContext *s)
2805{
2806#ifdef TARGET_X86_64
2807 if (CODE64(s)) {
2808 gen_op_movq_A0_reg(R_ESP);
2809 if (s->dflag) {
2810 gen_op_addq_A0_im(-8);
2811 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2812 } else {
2813 gen_op_addq_A0_im(-2);
2814 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2815 }
2816 gen_op_mov_reg_A0(2, R_ESP);
2817 } else
2818#endif
2819 {
2820 gen_op_movl_A0_reg(R_ESP);
2821 if (!s->dflag)
2822 gen_op_addl_A0_im(-2);
2823 else
2824 gen_op_addl_A0_im(-4);
2825 if (s->ss32) {
2826 if (s->addseg) {
2827 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2828 gen_op_addl_A0_seg(R_SS);
2829 }
2830 } else {
2831 gen_op_andl_A0_ffff();
2832 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2833 gen_op_addl_A0_seg(R_SS);
2834 }
2835 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2836 if (s->ss32 && !s->addseg)
2837 gen_op_mov_reg_A0(1, R_ESP);
2838 else
2839 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2840 }
2841}
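/* Note (clarification): in the legacy-mode path above the store goes
 through A0 (with the SS base added when needed) and ESP is only written
 back after the store has succeeded, so a faulting push leaves ESP intact
 for precise exceptions. */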
2842
2843/* generate a push. It depends on ss32, addseg and dflag */
2844/* slower version for T1, only used for call Ev */
2845static void gen_push_T1(DisasContext *s)
2846{
2847#ifdef TARGET_X86_64
2848 if (CODE64(s)) {
2849 gen_op_movq_A0_reg(R_ESP);
2850 if (s->dflag) {
2851 gen_op_addq_A0_im(-8);
2852 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2853 } else {
2854 gen_op_addq_A0_im(-2);
2855 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2856 }
2857 gen_op_mov_reg_A0(2, R_ESP);
2858 } else
2859#endif
2860 {
2861 gen_op_movl_A0_reg(R_ESP);
2862 if (!s->dflag)
2863 gen_op_addl_A0_im(-2);
2864 else
2865 gen_op_addl_A0_im(-4);
2866 if (s->ss32) {
2867 if (s->addseg) {
2868 gen_op_addl_A0_seg(R_SS);
2869 }
2870 } else {
2871 gen_op_andl_A0_ffff();
2872 gen_op_addl_A0_seg(R_SS);
2873 }
2874 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2875
2876 if (s->ss32 && !s->addseg)
2877 gen_op_mov_reg_A0(1, R_ESP);
2878 else
2879 gen_stack_update(s, (-2) << s->dflag);
2880 }
2881}
2882
2883/* a two-step pop is necessary for precise exceptions: the load may
 fault, so ESP is only advanced afterwards by gen_pop_update() */
2884static void gen_pop_T0(DisasContext *s)
2885{
2886#ifdef TARGET_X86_64
2887 if (CODE64(s)) {
2888 gen_op_movq_A0_reg(R_ESP);
2889 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2890 } else
2891#endif
2892 {
2893 gen_op_movl_A0_reg(R_ESP);
2894 if (s->ss32) {
2895 if (s->addseg)
2896 gen_op_addl_A0_seg(R_SS);
2897 } else {
2898 gen_op_andl_A0_ffff();
2899 gen_op_addl_A0_seg(R_SS);
2900 }
2901 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2902 }
2903}
2904
2905static void gen_pop_update(DisasContext *s)
2906{
2907#ifdef TARGET_X86_64
2908 if (CODE64(s) && s->dflag) {
2909 gen_stack_update(s, 8);
2910 } else
2911#endif
2912 {
2913 gen_stack_update(s, 2 << s->dflag);
2914 }
2915}
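/* Usage sketch (hypothetical call sequence, mirroring how a register POP
 is typically emitted with these helpers; 'ot' and 'reg' are
 placeholders): */
#if 0
 gen_pop_T0(s); /* T0 = [SS:ESP]; may fault, ESP untouched */
 gen_pop_update(s); /* the load succeeded: advance ESP */
 gen_op_mov_reg_T0(ot, reg); /* commit last, so "pop %esp" keeps the loaded value */
#endif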
2916
2917static void gen_stack_A0(DisasContext *s)
2918{
2919 gen_op_movl_A0_reg(R_ESP);
2920 if (!s->ss32)
2921 gen_op_andl_A0_ffff();
2922 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2923 if (s->addseg)
2924 gen_op_addl_A0_seg(R_SS);
2925}
2926
2927/* NOTE: wrap-around in 16-bit mode is not fully handled */
2928static void gen_pusha(DisasContext *s)
2929{
2930 int i;
2931 gen_op_movl_A0_reg(R_ESP);
2932 gen_op_addl_A0_im(-16 << s->dflag);
2933 if (!s->ss32)
2934 gen_op_andl_A0_ffff();
2935 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2936 if (s->addseg)
2937 gen_op_addl_A0_seg(R_SS);
2938 for(i = 0;i < 8; i++) {
2939 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2940 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2941 gen_op_addl_A0_im(2 << s->dflag);
2942 }
2943 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2944}
2945
2946/* NOTE: wrap-around in 16-bit mode is not fully handled */
2947static void gen_popa(DisasContext *s)
2948{
2949 int i;
2950 gen_op_movl_A0_reg(R_ESP);
2951 if (!s->ss32)
2952 gen_op_andl_A0_ffff();
2953 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2954 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2955 if (s->addseg)
2956 gen_op_addl_A0_seg(R_SS);
2957 for(i = 0;i < 8; i++) {
2958 /* ESP is not reloaded */
2959 if (i != 3) {
2960 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2961 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2962 }
2963 gen_op_addl_A0_im(2 << s->dflag);
2964 }
2965 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2966}
2967
2968static void gen_enter(DisasContext *s, int esp_addend, int level)
2969{
2970 int ot, opsize;
2971
2972 level &= 0x1f;
2973#ifdef TARGET_X86_64
2974 if (CODE64(s)) {
2975 ot = s->dflag ? OT_QUAD : OT_WORD;
2976 opsize = 1 << ot;
2977
2978 gen_op_movl_A0_reg(R_ESP);
2979 gen_op_addq_A0_im(-opsize);
2980 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2981
2982 /* push bp */
2983 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2984 gen_op_st_T0_A0(ot + s->mem_index);
2985 if (level) {
2986 /* XXX: must save state */
2987 tcg_gen_helper_0_3(helper_enter64_level,
2988 tcg_const_i32(level),
2989 tcg_const_i32((ot == OT_QUAD)),
2990 cpu_T[1]);
2991 }
2992 gen_op_mov_reg_T1(ot, R_EBP);
2993 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2994 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2995 } else
2996#endif
2997 {
2998 ot = s->dflag + OT_WORD;
2999 opsize = 2 << s->dflag;
3000
3001 gen_op_movl_A0_reg(R_ESP);
3002 gen_op_addl_A0_im(-opsize);
3003 if (!s->ss32)
3004 gen_op_andl_A0_ffff();
3005 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3006 if (s->addseg)
3007 gen_op_addl_A0_seg(R_SS);
3008 /* push bp */
3009 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3010 gen_op_st_T0_A0(ot + s->mem_index);
3011 if (level) {
3012 /* XXX: must save state */
3013 tcg_gen_helper_0_3(helper_enter_level,
3014 tcg_const_i32(level),
3015 tcg_const_i32(s->dflag),
3016 cpu_T[1]);
3017 }
3018 gen_op_mov_reg_T1(ot, R_EBP);
3019 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3020 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3021 }
3022}
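/* Worked example (illustrative): "enter 16, 0" in 32-bit code pushes EBP,
 makes EBP point at the saved value, and reserves 16 further bytes, so the
 sequence above ends with ESP = EBP - 16. Non-zero nesting levels are
 delegated to helper_enter_level()/helper_enter64_level(), which copy the
 intermediate frame pointers. */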
3023
3024static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3025{
3026 if (s->cc_op != CC_OP_DYNAMIC)
3027 gen_op_set_cc_op(s->cc_op);
3028 gen_jmp_im(cur_eip);
3029 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3030 s->is_jmp = 3;
3031}
3032
3033/* an interrupt is different from an exception because of the
3034 privilege checks */
3035static void gen_interrupt(DisasContext *s, int intno,
3036 target_ulong cur_eip, target_ulong next_eip)
3037{
3038 if (s->cc_op != CC_OP_DYNAMIC)
3039 gen_op_set_cc_op(s->cc_op);
3040 gen_jmp_im(cur_eip);
3041 tcg_gen_helper_0_2(helper_raise_interrupt,
3042 tcg_const_i32(intno),
3043 tcg_const_i32(next_eip - cur_eip));
3044 s->is_jmp = 3;
3045}
3046
3047static void gen_debug(DisasContext *s, target_ulong cur_eip)
3048{
3049 if (s->cc_op != CC_OP_DYNAMIC)
3050 gen_op_set_cc_op(s->cc_op);
3051 gen_jmp_im(cur_eip);
3052 tcg_gen_helper_0_0(helper_debug);
3053 s->is_jmp = 3;
3054}
3055
3056/* generate a generic end of block; a trace exception is also
3057 generated if needed */
3058static void gen_eob(DisasContext *s)
3059{
3060 if (s->cc_op != CC_OP_DYNAMIC)
3061 gen_op_set_cc_op(s->cc_op);
3062 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3063 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3064 }
3065 if (s->singlestep_enabled) {
3066 tcg_gen_helper_0_0(helper_debug);
3067 } else if (s->tf) {
3068 tcg_gen_helper_0_0(helper_single_step);
3069 } else {
3070 tcg_gen_exit_tb(0);
3071 }
3072 s->is_jmp = 3;
3073}
3074
3075/* generate a jump to eip. No segment change may happen beforehand,
3076 since a direct call to the next block may occur */
3077static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3078{
3079 if (s->jmp_opt) {
3080#ifdef VBOX
3081 gen_check_external_event(s);
3082#endif /* VBOX */
3083 if (s->cc_op != CC_OP_DYNAMIC) {
3084 gen_op_set_cc_op(s->cc_op);
3085 s->cc_op = CC_OP_DYNAMIC;
3086 }
3087 gen_goto_tb(s, tb_num, eip);
3088 s->is_jmp = 3;
3089 } else {
3090 gen_jmp_im(eip);
3091 gen_eob(s);
3092 }
3093}
3094
3095static void gen_jmp(DisasContext *s, target_ulong eip)
3096{
3097 gen_jmp_tb(s, eip, 0);
3098}
3099
3100#ifndef VBOX
3101static inline void gen_ldq_env_A0(int idx, int offset)
3102#else /* VBOX */
3103DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3104#endif /* VBOX */
3105{
3106 int mem_index = (idx >> 2) - 1;
3107 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3108 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3109}
3110
3111#ifndef VBOX
3112static inline void gen_stq_env_A0(int idx, int offset)
3113#else /* VBOX */
3114DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3115#endif /* VBOX */
3116{
3117 int mem_index = (idx >> 2) - 1;
3118 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3119 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3120}
3121
3122#ifndef VBOX
3123static inline void gen_ldo_env_A0(int idx, int offset)
3124#else /* VBOX */
3125DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3126#endif /* VBOX */
3127{
3128 int mem_index = (idx >> 2) - 1;
3129 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3130 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3131 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3132 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3133 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3134}
3135
3136#ifndef VBOX
3137static inline void gen_sto_env_A0(int idx, int offset)
3138#else /* VBOX */
3139DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3140#endif /* VBOX */
3141{
3142 int mem_index = (idx >> 2) - 1;
3143 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3144 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3145 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3146 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3147 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3148}
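/* Note (clarification of the four helpers above): the TCG memory ops used
 here are at most 64 bits wide, so a full 128-bit XMM transfer is
 synthesized from two 64-bit accesses at A0 and A0 + 8, while the ldq/stq
 variants move a single 64-bit half. */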
3149
3150#ifndef VBOX
3151static inline void gen_op_movo(int d_offset, int s_offset)
3152#else /* VBOX */
3153DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3154#endif /* VBOX */
3155{
3156 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3157 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3158 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3159 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3160}
3161
3162#ifndef VBOX
3163static inline void gen_op_movq(int d_offset, int s_offset)
3164#else /* VBOX */
3165DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3166#endif /* VBOX */
3167{
3168 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3169 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3170}
3171
3172#ifndef VBOX
3173static inline void gen_op_movl(int d_offset, int s_offset)
3174#else /* VBOX */
3175DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3176#endif /* VBOX */
3177{
3178 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3179 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3180}
3181
3182#ifndef VBOX
3183static inline void gen_op_movq_env_0(int d_offset)
3184#else /* VBOX */
3185DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3186#endif /* VBOX */
3187{
3188 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3189 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3190}
3191
3192#define SSE_SPECIAL ((void *)1)
3193#define SSE_DUMMY ((void *)2)
3194
3195#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3196#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3197 helper_ ## x ## ss, helper_ ## x ## sd, }
3198
3199static void *sse_op_table1[256][4] = {
3200 /* 3DNow! extensions */
3201 [0x0e] = { SSE_DUMMY }, /* femms */
3202 [0x0f] = { SSE_DUMMY }, /* pf... */
3203 /* pure SSE operations */
3204 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3205 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3206 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3207 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3208 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3209 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3210 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3211 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3212
3213 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3214 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3215 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3216 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3217 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
3218 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
3219 [0x2e] = { helper_ucomiss, helper_ucomisd },
3220 [0x2f] = { helper_comiss, helper_comisd },
3221 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3222 [0x51] = SSE_FOP(sqrt),
3223 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3224 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3225 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3226 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3227 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3228 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3229 [0x58] = SSE_FOP(add),
3230 [0x59] = SSE_FOP(mul),
3231 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3232 helper_cvtss2sd, helper_cvtsd2ss },
3233 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3234 [0x5c] = SSE_FOP(sub),
3235 [0x5d] = SSE_FOP(min),
3236 [0x5e] = SSE_FOP(div),
3237 [0x5f] = SSE_FOP(max),
3238
3239 [0xc2] = SSE_FOP(cmpeq),
3240 [0xc6] = { helper_shufps, helper_shufpd },
3241
3242 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3243 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3244
3245 /* MMX ops and their SSE extensions */
3246 [0x60] = MMX_OP2(punpcklbw),
3247 [0x61] = MMX_OP2(punpcklwd),
3248 [0x62] = MMX_OP2(punpckldq),
3249 [0x63] = MMX_OP2(packsswb),
3250 [0x64] = MMX_OP2(pcmpgtb),
3251 [0x65] = MMX_OP2(pcmpgtw),
3252 [0x66] = MMX_OP2(pcmpgtl),
3253 [0x67] = MMX_OP2(packuswb),
3254 [0x68] = MMX_OP2(punpckhbw),
3255 [0x69] = MMX_OP2(punpckhwd),
3256 [0x6a] = MMX_OP2(punpckhdq),
3257 [0x6b] = MMX_OP2(packssdw),
3258 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3259 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3260 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3261 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3262 [0x70] = { helper_pshufw_mmx,
3263 helper_pshufd_xmm,
3264 helper_pshufhw_xmm,
3265 helper_pshuflw_xmm },
3266 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3267 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3268 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3269 [0x74] = MMX_OP2(pcmpeqb),
3270 [0x75] = MMX_OP2(pcmpeqw),
3271 [0x76] = MMX_OP2(pcmpeql),
3272 [0x77] = { SSE_DUMMY }, /* emms */
3273 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3274 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3275 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3276 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3277 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3278 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3279 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3280 [0xd1] = MMX_OP2(psrlw),
3281 [0xd2] = MMX_OP2(psrld),
3282 [0xd3] = MMX_OP2(psrlq),
3283 [0xd4] = MMX_OP2(paddq),
3284 [0xd5] = MMX_OP2(pmullw),
3285 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3286 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3287 [0xd8] = MMX_OP2(psubusb),
3288 [0xd9] = MMX_OP2(psubusw),
3289 [0xda] = MMX_OP2(pminub),
3290 [0xdb] = MMX_OP2(pand),
3291 [0xdc] = MMX_OP2(paddusb),
3292 [0xdd] = MMX_OP2(paddusw),
3293 [0xde] = MMX_OP2(pmaxub),
3294 [0xdf] = MMX_OP2(pandn),
3295 [0xe0] = MMX_OP2(pavgb),
3296 [0xe1] = MMX_OP2(psraw),
3297 [0xe2] = MMX_OP2(psrad),
3298 [0xe3] = MMX_OP2(pavgw),
3299 [0xe4] = MMX_OP2(pmulhuw),
3300 [0xe5] = MMX_OP2(pmulhw),
3301 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3302 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3303 [0xe8] = MMX_OP2(psubsb),
3304 [0xe9] = MMX_OP2(psubsw),
3305 [0xea] = MMX_OP2(pminsw),
3306 [0xeb] = MMX_OP2(por),
3307 [0xec] = MMX_OP2(paddsb),
3308 [0xed] = MMX_OP2(paddsw),
3309 [0xee] = MMX_OP2(pmaxsw),
3310 [0xef] = MMX_OP2(pxor),
3311 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3312 [0xf1] = MMX_OP2(psllw),
3313 [0xf2] = MMX_OP2(pslld),
3314 [0xf3] = MMX_OP2(psllq),
3315 [0xf4] = MMX_OP2(pmuludq),
3316 [0xf5] = MMX_OP2(pmaddwd),
3317 [0xf6] = MMX_OP2(psadbw),
3318 [0xf7] = MMX_OP2(maskmov),
3319 [0xf8] = MMX_OP2(psubb),
3320 [0xf9] = MMX_OP2(psubw),
3321 [0xfa] = MMX_OP2(psubl),
3322 [0xfb] = MMX_OP2(psubq),
3323 [0xfc] = MMX_OP2(paddb),
3324 [0xfd] = MMX_OP2(paddw),
3325 [0xfe] = MMX_OP2(paddl),
3326};
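/* Indexing note (illustrative): sse_op_table1 is indexed by the second
 opcode byte and by the mandatory prefix, mapped in gen_sse() below as
 b1 = 0 (none), 1 (0x66), 2 (0xF3), 3 (0xF2). For instance, opcode 0x58
 resolves through the SSE_FOP(add) row to helper_addps, helper_addpd,
 helper_addss or helper_addsd depending on the prefix. */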
3327
3328static void *sse_op_table2[3 * 8][2] = {
3329 [0 + 2] = MMX_OP2(psrlw),
3330 [0 + 4] = MMX_OP2(psraw),
3331 [0 + 6] = MMX_OP2(psllw),
3332 [8 + 2] = MMX_OP2(psrld),
3333 [8 + 4] = MMX_OP2(psrad),
3334 [8 + 6] = MMX_OP2(pslld),
3335 [16 + 2] = MMX_OP2(psrlq),
3336 [16 + 3] = { NULL, helper_psrldq_xmm },
3337 [16 + 6] = MMX_OP2(psllq),
3338 [16 + 7] = { NULL, helper_pslldq_xmm },
3339};
3340
3341static void *sse_op_table3[4 * 3] = {
3342 helper_cvtsi2ss,
3343 helper_cvtsi2sd,
3344 X86_64_ONLY(helper_cvtsq2ss),
3345 X86_64_ONLY(helper_cvtsq2sd),
3346
3347 helper_cvttss2si,
3348 helper_cvttsd2si,
3349 X86_64_ONLY(helper_cvttss2sq),
3350 X86_64_ONLY(helper_cvttsd2sq),
3351
3352 helper_cvtss2si,
3353 helper_cvtsd2si,
3354 X86_64_ONLY(helper_cvtss2sq),
3355 X86_64_ONLY(helper_cvtsd2sq),
3356};
3357
3358static void *sse_op_table4[8][4] = {
3359 SSE_FOP(cmpeq),
3360 SSE_FOP(cmplt),
3361 SSE_FOP(cmple),
3362 SSE_FOP(cmpunord),
3363 SSE_FOP(cmpneq),
3364 SSE_FOP(cmpnlt),
3365 SSE_FOP(cmpnle),
3366 SSE_FOP(cmpord),
3367};
3368
3369static void *sse_op_table5[256] = {
3370 [0x0c] = helper_pi2fw,
3371 [0x0d] = helper_pi2fd,
3372 [0x1c] = helper_pf2iw,
3373 [0x1d] = helper_pf2id,
3374 [0x8a] = helper_pfnacc,
3375 [0x8e] = helper_pfpnacc,
3376 [0x90] = helper_pfcmpge,
3377 [0x94] = helper_pfmin,
3378 [0x96] = helper_pfrcp,
3379 [0x97] = helper_pfrsqrt,
3380 [0x9a] = helper_pfsub,
3381 [0x9e] = helper_pfadd,
3382 [0xa0] = helper_pfcmpgt,
3383 [0xa4] = helper_pfmax,
3384 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3385 [0xa7] = helper_movq, /* pfrsqit1 */
3386 [0xaa] = helper_pfsubr,
3387 [0xae] = helper_pfacc,
3388 [0xb0] = helper_pfcmpeq,
3389 [0xb4] = helper_pfmul,
3390 [0xb6] = helper_movq, /* pfrcpit2 */
3391 [0xb7] = helper_pmulhrw_mmx,
3392 [0xbb] = helper_pswapd,
3393 [0xbf] = helper_pavgb_mmx /* pavgusb */
3394};
3395
3396struct sse_op_helper_s {
3397 void *op[2]; uint32_t ext_mask;
3398};
3399#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3400#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3401#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3402#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3403static struct sse_op_helper_s sse_op_table6[256] = {
3404 [0x00] = SSSE3_OP(pshufb),
3405 [0x01] = SSSE3_OP(phaddw),
3406 [0x02] = SSSE3_OP(phaddd),
3407 [0x03] = SSSE3_OP(phaddsw),
3408 [0x04] = SSSE3_OP(pmaddubsw),
3409 [0x05] = SSSE3_OP(phsubw),
3410 [0x06] = SSSE3_OP(phsubd),
3411 [0x07] = SSSE3_OP(phsubsw),
3412 [0x08] = SSSE3_OP(psignb),
3413 [0x09] = SSSE3_OP(psignw),
3414 [0x0a] = SSSE3_OP(psignd),
3415 [0x0b] = SSSE3_OP(pmulhrsw),
3416 [0x10] = SSE41_OP(pblendvb),
3417 [0x14] = SSE41_OP(blendvps),
3418 [0x15] = SSE41_OP(blendvpd),
3419 [0x17] = SSE41_OP(ptest),
3420 [0x1c] = SSSE3_OP(pabsb),
3421 [0x1d] = SSSE3_OP(pabsw),
3422 [0x1e] = SSSE3_OP(pabsd),
3423 [0x20] = SSE41_OP(pmovsxbw),
3424 [0x21] = SSE41_OP(pmovsxbd),
3425 [0x22] = SSE41_OP(pmovsxbq),
3426 [0x23] = SSE41_OP(pmovsxwd),
3427 [0x24] = SSE41_OP(pmovsxwq),
3428 [0x25] = SSE41_OP(pmovsxdq),
3429 [0x28] = SSE41_OP(pmuldq),
3430 [0x29] = SSE41_OP(pcmpeqq),
3431 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3432 [0x2b] = SSE41_OP(packusdw),
3433 [0x30] = SSE41_OP(pmovzxbw),
3434 [0x31] = SSE41_OP(pmovzxbd),
3435 [0x32] = SSE41_OP(pmovzxbq),
3436 [0x33] = SSE41_OP(pmovzxwd),
3437 [0x34] = SSE41_OP(pmovzxwq),
3438 [0x35] = SSE41_OP(pmovzxdq),
3439 [0x37] = SSE42_OP(pcmpgtq),
3440 [0x38] = SSE41_OP(pminsb),
3441 [0x39] = SSE41_OP(pminsd),
3442 [0x3a] = SSE41_OP(pminuw),
3443 [0x3b] = SSE41_OP(pminud),
3444 [0x3c] = SSE41_OP(pmaxsb),
3445 [0x3d] = SSE41_OP(pmaxsd),
3446 [0x3e] = SSE41_OP(pmaxuw),
3447 [0x3f] = SSE41_OP(pmaxud),
3448 [0x40] = SSE41_OP(pmulld),
3449 [0x41] = SSE41_OP(phminposuw),
3450};
3451
3452static struct sse_op_helper_s sse_op_table7[256] = {
3453 [0x08] = SSE41_OP(roundps),
3454 [0x09] = SSE41_OP(roundpd),
3455 [0x0a] = SSE41_OP(roundss),
3456 [0x0b] = SSE41_OP(roundsd),
3457 [0x0c] = SSE41_OP(blendps),
3458 [0x0d] = SSE41_OP(blendpd),
3459 [0x0e] = SSE41_OP(pblendw),
3460 [0x0f] = SSSE3_OP(palignr),
3461 [0x14] = SSE41_SPECIAL, /* pextrb */
3462 [0x15] = SSE41_SPECIAL, /* pextrw */
3463 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3464 [0x17] = SSE41_SPECIAL, /* extractps */
3465 [0x20] = SSE41_SPECIAL, /* pinsrb */
3466 [0x21] = SSE41_SPECIAL, /* insertps */
3467 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3468 [0x40] = SSE41_OP(dpps),
3469 [0x41] = SSE41_OP(dppd),
3470 [0x42] = SSE41_OP(mpsadbw),
3471 [0x60] = SSE42_OP(pcmpestrm),
3472 [0x61] = SSE42_OP(pcmpestri),
3473 [0x62] = SSE42_OP(pcmpistrm),
3474 [0x63] = SSE42_OP(pcmpistri),
3475};
3476
3477static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3478{
3479 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3480 int modrm, mod, rm, reg, reg_addr, offset_addr;
3481 void *sse_op2;
3482
3483 b &= 0xff;
3484 if (s->prefix & PREFIX_DATA)
3485 b1 = 1;
3486 else if (s->prefix & PREFIX_REPZ)
3487 b1 = 2;
3488 else if (s->prefix & PREFIX_REPNZ)
3489 b1 = 3;
3490 else
3491 b1 = 0;
3492 sse_op2 = sse_op_table1[b][b1];
3493 if (!sse_op2)
3494 goto illegal_op;
3495 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3496 is_xmm = 1;
3497 } else {
3498 if (b1 == 0) {
3499 /* MMX case */
3500 is_xmm = 0;
3501 } else {
3502 is_xmm = 1;
3503 }
3504 }
3505 /* simple MMX/SSE operation */
3506 if (s->flags & HF_TS_MASK) {
3507 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3508 return;
3509 }
3510 if (s->flags & HF_EM_MASK) {
3511 illegal_op:
3512 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3513 return;
3514 }
3515 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3516 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3517 goto illegal_op;
3518 if (b == 0x0e) {
3519 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3520 goto illegal_op;
3521 /* femms */
3522 tcg_gen_helper_0_0(helper_emms);
3523 return;
3524 }
3525 if (b == 0x77) {
3526 /* emms */
3527 tcg_gen_helper_0_0(helper_emms);
3528 return;
3529 }
3530 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3531 the static cpu state) */
3532 if (!is_xmm) {
3533 tcg_gen_helper_0_0(helper_enter_mmx);
3534 }
3535
3536 modrm = ldub_code(s->pc++);
3537 reg = ((modrm >> 3) & 7);
3538 if (is_xmm)
3539 reg |= rex_r;
3540 mod = (modrm >> 6) & 3;
3541 if (sse_op2 == SSE_SPECIAL) {
3542 b |= (b1 << 8);
3543 switch(b) {
3544 case 0x0e7: /* movntq */
3545 if (mod == 3)
3546 goto illegal_op;
3547 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3548 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3549 break;
 case 0x1e7: /* movntdq */
 case 0x02b: /* movntps */
 case 0x12b: /* movntpd */
 if (mod == 3)
 goto illegal_op;
 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
 break;
 case 0x3f0: /* lddqu is a load, not a store like the movnt cases above */
 if (mod == 3)
 goto illegal_op;
 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
 break;
3559 case 0x6e: /* movd mm, ea */
3560#ifdef TARGET_X86_64
3561 if (s->dflag == 2) {
3562 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3563 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3564 } else
3565#endif
3566 {
3567 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3568 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3569 offsetof(CPUX86State,fpregs[reg].mmx));
3570 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3571 }
3572 break;
3573 case 0x16e: /* movd xmm, ea */
3574#ifdef TARGET_X86_64
3575 if (s->dflag == 2) {
3576 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3577 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3578 offsetof(CPUX86State,xmm_regs[reg]));
3579 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3580 } else
3581#endif
3582 {
3583 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3584 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3585 offsetof(CPUX86State,xmm_regs[reg]));
3586 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3587 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3588 }
3589 break;
3590 case 0x6f: /* movq mm, ea */
3591 if (mod != 3) {
3592 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3593 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3594 } else {
3595 rm = (modrm & 7);
3596 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3597 offsetof(CPUX86State,fpregs[rm].mmx));
3598 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3599 offsetof(CPUX86State,fpregs[reg].mmx));
3600 }
3601 break;
3602 case 0x010: /* movups */
3603 case 0x110: /* movupd */
3604 case 0x028: /* movaps */
3605 case 0x128: /* movapd */
3606 case 0x16f: /* movdqa xmm, ea */
3607 case 0x26f: /* movdqu xmm, ea */
3608 if (mod != 3) {
3609 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3610 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3611 } else {
3612 rm = (modrm & 7) | REX_B(s);
3613 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3614 offsetof(CPUX86State,xmm_regs[rm]));
3615 }
3616 break;
3617 case 0x210: /* movss xmm, ea */
3618 if (mod != 3) {
3619 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3620 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3621 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3622 gen_op_movl_T0_0();
3623 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3624 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3625 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3626 } else {
3627 rm = (modrm & 7) | REX_B(s);
3628 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3629 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3630 }
3631 break;
3632 case 0x310: /* movsd xmm, ea */
3633 if (mod != 3) {
3634 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3635 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3636 gen_op_movl_T0_0();
3637 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3638 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3639 } else {
3640 rm = (modrm & 7) | REX_B(s);
3641 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3642 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3643 }
3644 break;
3645 case 0x012: /* movlps */
3646 case 0x112: /* movlpd */
3647 if (mod != 3) {
3648 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3649 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3650 } else {
3651 /* movhlps */
3652 rm = (modrm & 7) | REX_B(s);
3653 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3654 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3655 }
3656 break;
3657 case 0x212: /* movsldup */
3658 if (mod != 3) {
3659 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3660 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3661 } else {
3662 rm = (modrm & 7) | REX_B(s);
3663 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3664 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3665 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3666 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3667 }
3668 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3669 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3670 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3671 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3672 break;
3673 case 0x312: /* movddup */
3674 if (mod != 3) {
3675 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3676 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3677 } else {
3678 rm = (modrm & 7) | REX_B(s);
3679 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3680 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3681 }
3682 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3683 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3684 break;
3685 case 0x016: /* movhps */
3686 case 0x116: /* movhpd */
3687 if (mod != 3) {
3688 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3689 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3690 } else {
3691 /* movlhps */
3692 rm = (modrm & 7) | REX_B(s);
3693 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3694 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3695 }
3696 break;
3697 case 0x216: /* movshdup */
3698 if (mod != 3) {
3699 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3700 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3701 } else {
3702 rm = (modrm & 7) | REX_B(s);
3703 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3704 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3705 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3706 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3707 }
3708 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3709 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3710 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3711 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3712 break;
3713 case 0x7e: /* movd ea, mm */
3714#ifdef TARGET_X86_64
3715 if (s->dflag == 2) {
3716 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3717 offsetof(CPUX86State,fpregs[reg].mmx));
3718 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3719 } else
3720#endif
3721 {
3722 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3723 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3724 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3725 }
3726 break;
3727 case 0x17e: /* movd ea, xmm */
3728#ifdef TARGET_X86_64
3729 if (s->dflag == 2) {
3730 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3731 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3732 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3733 } else
3734#endif
3735 {
3736 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3737 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3738 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3739 }
3740 break;
3741 case 0x27e: /* movq xmm, ea */
3742 if (mod != 3) {
3743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3744 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3745 } else {
3746 rm = (modrm & 7) | REX_B(s);
3747 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3748 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3749 }
3750 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3751 break;
3752 case 0x7f: /* movq ea, mm */
3753 if (mod != 3) {
3754 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3755 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3756 } else {
3757 rm = (modrm & 7);
3758 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3759 offsetof(CPUX86State,fpregs[reg].mmx));
3760 }
3761 break;
3762 case 0x011: /* movups */
3763 case 0x111: /* movupd */
3764 case 0x029: /* movaps */
3765 case 0x129: /* movapd */
3766 case 0x17f: /* movdqa ea, xmm */
3767 case 0x27f: /* movdqu ea, xmm */
3768 if (mod != 3) {
3769 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3770 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3771 } else {
3772 rm = (modrm & 7) | REX_B(s);
3773 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3774 offsetof(CPUX86State,xmm_regs[reg]));
3775 }
3776 break;
3777 case 0x211: /* movss ea, xmm */
3778 if (mod != 3) {
3779 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3780 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3781 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3782 } else {
3783 rm = (modrm & 7) | REX_B(s);
3784 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3785 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3786 }
3787 break;
3788 case 0x311: /* movsd ea, xmm */
3789 if (mod != 3) {
3790 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3791 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3792 } else {
3793 rm = (modrm & 7) | REX_B(s);
3794 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3795 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3796 }
3797 break;
3798 case 0x013: /* movlps */
3799 case 0x113: /* movlpd */
3800 if (mod != 3) {
3801 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3802 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3803 } else {
3804 goto illegal_op;
3805 }
3806 break;
3807 case 0x017: /* movhps */
3808 case 0x117: /* movhpd */
3809 if (mod != 3) {
3810 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3811 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3812 } else {
3813 goto illegal_op;
3814 }
3815 break;
3816 case 0x71: /* shift mm, im */
3817 case 0x72:
3818 case 0x73:
3819 case 0x171: /* shift xmm, im */
3820 case 0x172:
3821 case 0x173:
3822 val = ldub_code(s->pc++);
3823 if (is_xmm) {
3824 gen_op_movl_T0_im(val);
3825 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3826 gen_op_movl_T0_0();
3827 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3828 op1_offset = offsetof(CPUX86State,xmm_t0);
3829 } else {
3830 gen_op_movl_T0_im(val);
3831 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3832 gen_op_movl_T0_0();
3833 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3834 op1_offset = offsetof(CPUX86State,mmx_t0);
3835 }
3836 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3837 if (!sse_op2)
3838 goto illegal_op;
3839 if (is_xmm) {
3840 rm = (modrm & 7) | REX_B(s);
3841 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3842 } else {
3843 rm = (modrm & 7);
3844 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3845 }
3846 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3847 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3848 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3849 break;
3850 case 0x050: /* movmskps */
3851 rm = (modrm & 7) | REX_B(s);
3852 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3853 offsetof(CPUX86State,xmm_regs[rm]));
3854 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3855 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3856 gen_op_mov_reg_T0(OT_LONG, reg);
3857 break;
3858 case 0x150: /* movmskpd */
3859 rm = (modrm & 7) | REX_B(s);
3860 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3861 offsetof(CPUX86State,xmm_regs[rm]));
3862 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3863 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3864 gen_op_mov_reg_T0(OT_LONG, reg);
3865 break;
3866 case 0x02a: /* cvtpi2ps */
3867 case 0x12a: /* cvtpi2pd */
3868 tcg_gen_helper_0_0(helper_enter_mmx);
3869 if (mod != 3) {
3870 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3871 op2_offset = offsetof(CPUX86State,mmx_t0);
3872 gen_ldq_env_A0(s->mem_index, op2_offset);
3873 } else {
3874 rm = (modrm & 7);
3875 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3876 }
3877 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3878 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3879 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3880 switch(b >> 8) {
3881 case 0x0:
3882 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3883 break;
3884 default:
3885 case 0x1:
3886 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3887 break;
3888 }
3889 break;
3890 case 0x22a: /* cvtsi2ss */
3891 case 0x32a: /* cvtsi2sd */
3892 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3893 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3894 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3895 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3896 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3897 if (ot == OT_LONG) {
3898 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3899 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3900 } else {
3901 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3902 }
3903 break;
3904 case 0x02c: /* cvttps2pi */
3905 case 0x12c: /* cvttpd2pi */
3906 case 0x02d: /* cvtps2pi */
3907 case 0x12d: /* cvtpd2pi */
3908 tcg_gen_helper_0_0(helper_enter_mmx);
3909 if (mod != 3) {
3910 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3911 op2_offset = offsetof(CPUX86State,xmm_t0);
3912 gen_ldo_env_A0(s->mem_index, op2_offset);
3913 } else {
3914 rm = (modrm & 7) | REX_B(s);
3915 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3916 }
3917 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3918 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3919 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3920 switch(b) {
3921 case 0x02c:
3922 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3923 break;
3924 case 0x12c:
3925 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3926 break;
3927 case 0x02d:
3928 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3929 break;
3930 case 0x12d:
3931 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3932 break;
3933 }
3934 break;
3935 case 0x22c: /* cvttss2si */
3936 case 0x32c: /* cvttsd2si */
3937 case 0x22d: /* cvtss2si */
3938 case 0x32d: /* cvtsd2si */
3939 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3940 if (mod != 3) {
3941 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3942 if ((b >> 8) & 1) {
3943 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3944 } else {
3945 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3946 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3947 }
3948 op2_offset = offsetof(CPUX86State,xmm_t0);
3949 } else {
3950 rm = (modrm & 7) | REX_B(s);
3951 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3952 }
3953 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3954 (b & 1) * 4];
3955 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3956 if (ot == OT_LONG) {
3957 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3958 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3959 } else {
3960 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3961 }
3962 gen_op_mov_reg_T0(ot, reg);
3963 break;
3964 case 0xc4: /* pinsrw */
3965 case 0x1c4:
3966 s->rip_offset = 1;
3967 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3968 val = ldub_code(s->pc++);
3969 if (b1) {
3970 val &= 7;
3971 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3972 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3973 } else {
3974 val &= 3;
3975 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3976 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3977 }
3978 break;
3979 case 0xc5: /* pextrw */
3980 case 0x1c5:
3981 if (mod != 3)
3982 goto illegal_op;
3983 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3984 val = ldub_code(s->pc++);
3985 if (b1) {
3986 val &= 7;
3987 rm = (modrm & 7) | REX_B(s);
3988 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3989 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3990 } else {
3991 val &= 3;
3992 rm = (modrm & 7);
3993 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3994 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3995 }
3996 reg = ((modrm >> 3) & 7) | rex_r;
3997 gen_op_mov_reg_T0(ot, reg);
3998 break;
3999 case 0x1d6: /* movq ea, xmm */
4000 if (mod != 3) {
4001 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4002 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4003 } else {
4004 rm = (modrm & 7) | REX_B(s);
4005 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4006 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4007 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4008 }
4009 break;
4010 case 0x2d6: /* movq2dq */
4011 tcg_gen_helper_0_0(helper_enter_mmx);
4012 rm = (modrm & 7);
4013 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4014 offsetof(CPUX86State,fpregs[rm].mmx));
4015 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4016 break;
4017 case 0x3d6: /* movdq2q */
4018 tcg_gen_helper_0_0(helper_enter_mmx);
4019 rm = (modrm & 7) | REX_B(s);
4020 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4021 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4022 break;
4023 case 0xd7: /* pmovmskb */
4024 case 0x1d7:
4025 if (mod != 3)
4026 goto illegal_op;
4027 if (b1) {
4028 rm = (modrm & 7) | REX_B(s);
4029 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4030 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4031 } else {
4032 rm = (modrm & 7);
4033 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4034 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4035 }
4036 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4037 reg = ((modrm >> 3) & 7) | rex_r;
4038 gen_op_mov_reg_T0(OT_LONG, reg);
4039 break;
4040 case 0x138:
4041 if (s->prefix & PREFIX_REPNZ)
4042 goto crc32;
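 /* fall through */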
4043 case 0x038:
4044 b = modrm;
4045 modrm = ldub_code(s->pc++);
4046 rm = modrm & 7;
4047 reg = ((modrm >> 3) & 7) | rex_r;
4048 mod = (modrm >> 6) & 3;
4049
4050 sse_op2 = sse_op_table6[b].op[b1];
4051 if (!sse_op2)
4052 goto illegal_op;
4053 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4054 goto illegal_op;
4055
4056 if (b1) {
4057 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4058 if (mod == 3) {
4059 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4060 } else {
4061 op2_offset = offsetof(CPUX86State,xmm_t0);
4062 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4063 switch (b) {
4064 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4065 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4066 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4067 gen_ldq_env_A0(s->mem_index, op2_offset +
4068 offsetof(XMMReg, XMM_Q(0)));
4069 break;
4070 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4071 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4072 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4073 (s->mem_index >> 2) - 1);
4074 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4075 offsetof(XMMReg, XMM_L(0)));
4076 break;
4077 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4078 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4079 (s->mem_index >> 2) - 1);
4080 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4081 offsetof(XMMReg, XMM_W(0)));
4082 break;
4083 case 0x2a: /* movntdqa */
4084 gen_ldo_env_A0(s->mem_index, op1_offset);
4085 return;
4086 default:
4087 gen_ldo_env_A0(s->mem_index, op2_offset);
4088 }
4089 }
4090 } else {
4091 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4092 if (mod == 3) {
4093 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4094 } else {
4095 op2_offset = offsetof(CPUX86State,mmx_t0);
4096 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4097 gen_ldq_env_A0(s->mem_index, op2_offset);
4098 }
4099 }
4100 if (sse_op2 == SSE_SPECIAL)
4101 goto illegal_op;
4102
4103 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4104 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4105 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4106
4107 if (b == 0x17)
4108 s->cc_op = CC_OP_EFLAGS;
4109 break;
4110 case 0x338: /* crc32 */
4111 crc32:
4112 b = modrm;
4113 modrm = ldub_code(s->pc++);
4114 reg = ((modrm >> 3) & 7) | rex_r;
4115
4116 if (b != 0xf0 && b != 0xf1)
4117 goto illegal_op;
4118 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4119 goto illegal_op;
4120
4121 if (b == 0xf0)
4122 ot = OT_BYTE;
4123 else if (b == 0xf1 && s->dflag != 2) {
4124 if (s->prefix & PREFIX_DATA)
4125 ot = OT_WORD;
4126 else
4127 ot = OT_LONG;
4128 } else
4129 ot = OT_QUAD;
4130
4131 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4132 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4133 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4134 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4135 cpu_T[0], tcg_const_i32(8 << ot));
4136
4137 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4138 gen_op_mov_reg_T0(ot, reg);
4139 break;
4140 case 0x03a:
4141 case 0x13a:
4142 b = modrm;
4143 modrm = ldub_code(s->pc++);
4144 rm = modrm & 7;
4145 reg = ((modrm >> 3) & 7) | rex_r;
4146 mod = (modrm >> 6) & 3;
4147
4148 sse_op2 = sse_op_table7[b].op[b1];
4149 if (!sse_op2)
4150 goto illegal_op;
4151 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4152 goto illegal_op;
4153
4154 if (sse_op2 == SSE_SPECIAL) {
4155 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4156 rm = (modrm & 7) | REX_B(s);
4157 if (mod != 3)
4158 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4159 reg = ((modrm >> 3) & 7) | rex_r;
4160 val = ldub_code(s->pc++);
4161 switch (b) {
4162 case 0x14: /* pextrb */
4163 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4164 xmm_regs[reg].XMM_B(val & 15)));
4165 if (mod == 3)
4166 gen_op_mov_reg_T0(ot, rm);
4167 else
4168 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4169 (s->mem_index >> 2) - 1);
4170 break;
4171 case 0x15: /* pextrw */
4172 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4173 xmm_regs[reg].XMM_W(val & 7)));
4174 if (mod == 3)
4175 gen_op_mov_reg_T0(ot, rm);
4176 else
4177 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4178 (s->mem_index >> 2) - 1);
4179 break;
4180 case 0x16:
4181 if (ot == OT_LONG) { /* pextrd */
4182 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4183 offsetof(CPUX86State,
4184 xmm_regs[reg].XMM_L(val & 3)));
4185 if (mod == 3)
4186 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4187 else
4188 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4189 (s->mem_index >> 2) - 1);
4190 } else { /* pextrq */
4191 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4192 offsetof(CPUX86State,
4193 xmm_regs[reg].XMM_Q(val & 1)));
4194 if (mod == 3)
4195 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4196 else
4197 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4198 (s->mem_index >> 2) - 1);
4199 }
4200 break;
4201 case 0x17: /* extractps */
4202 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4203 xmm_regs[reg].XMM_L(val & 3)));
4204 if (mod == 3)
4205 gen_op_mov_reg_T0(ot, rm);
4206 else
4207 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4208 (s->mem_index >> 2) - 1);
4209 break;
4210 case 0x20: /* pinsrb */
4211 if (mod == 3)
4212 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4213 else
4214 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4215 (s->mem_index >> 2) - 1);
4216 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4217 xmm_regs[reg].XMM_B(val & 15)));
4218 break;
4219 case 0x21: /* insertps */
4220 if (mod == 3)
4221 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4222 offsetof(CPUX86State,xmm_regs[rm]
4223 .XMM_L((val >> 6) & 3)));
4224 else
4225 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4226 (s->mem_index >> 2) - 1);
4227 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4228 offsetof(CPUX86State,xmm_regs[reg]
4229 .XMM_L((val >> 4) & 3)));
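/* insertps imm8 layout: bits 7:6 select the source dword (register
   form), bits 5:4 the destination dword (both handled above), and
   bits 3:0 form a zero mask: each set bit clears one dword below. */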
4230 if ((val >> 0) & 1)
4231 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4232 cpu_env, offsetof(CPUX86State,
4233 xmm_regs[reg].XMM_L(0)));
4234 if ((val >> 1) & 1)
4235 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4236 cpu_env, offsetof(CPUX86State,
4237 xmm_regs[reg].XMM_L(1)));
4238 if ((val >> 2) & 1)
4239 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4240 cpu_env, offsetof(CPUX86State,
4241 xmm_regs[reg].XMM_L(2)));
4242 if ((val >> 3) & 1)
4243 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4244 cpu_env, offsetof(CPUX86State,
4245 xmm_regs[reg].XMM_L(3)));
4246 break;
4247 case 0x22:
4248 if (ot == OT_LONG) { /* pinsrd */
4249 if (mod == 3)
4250 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4251 else
4252 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4253 (s->mem_index >> 2) - 1);
4254 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4255 offsetof(CPUX86State,
4256 xmm_regs[reg].XMM_L(val & 3)));
4257 } else { /* pinsrq */
4258 if (mod == 3)
4259 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4260 else
4261 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4262 (s->mem_index >> 2) - 1);
4263 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4264 offsetof(CPUX86State,
4265 xmm_regs[reg].XMM_Q(val & 1)));
4266 }
4267 break;
4268 }
4269 return;
4270 }
4271
4272 if (b1) {
4273 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4274 if (mod == 3) {
4275 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4276 } else {
4277 op2_offset = offsetof(CPUX86State,xmm_t0);
4278 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4279 gen_ldo_env_A0(s->mem_index, op2_offset);
4280 }
4281 } else {
4282 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4283 if (mod == 3) {
4284 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4285 } else {
4286 op2_offset = offsetof(CPUX86State,mmx_t0);
4287 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4288 gen_ldq_env_A0(s->mem_index, op2_offset);
4289 }
4290 }
4291 val = ldub_code(s->pc++);
4292
4293 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4294 s->cc_op = CC_OP_EFLAGS;
4295
4296 if (s->dflag == 2)
4297 /* The helper must use the entire 64-bit gp registers */
4298 val |= 1 << 8;
4299 }
4300
4301 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4302 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4303 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4304 break;
4305 default:
4306 goto illegal_op;
4307 }
4308 } else {
4309 /* generic MMX or SSE operation */
4310 switch(b) {
4311 case 0x70: /* pshufx insn */
4312 case 0xc6: /* pshufx insn */
4313 case 0xc2: /* compare insns */
4314 s->rip_offset = 1;
4315 break;
4316 default:
4317 break;
4318 }
4319 if (is_xmm) {
4320 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4321 if (mod != 3) {
4322 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4323 op2_offset = offsetof(CPUX86State,xmm_t0);
4324 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4325 b == 0xc2)) {
4326 /* specific case for SSE single instructions */
4327 if (b1 == 2) {
4328 /* 32 bit access */
4329 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4330 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4331 } else {
4332 /* 64 bit access */
4333 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4334 }
4335 } else {
4336 gen_ldo_env_A0(s->mem_index, op2_offset);
4337 }
4338 } else {
4339 rm = (modrm & 7) | REX_B(s);
4340 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4341 }
4342 } else {
4343 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4344 if (mod != 3) {
4345 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4346 op2_offset = offsetof(CPUX86State,mmx_t0);
4347 gen_ldq_env_A0(s->mem_index, op2_offset);
4348 } else {
4349 rm = (modrm & 7);
4350 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4351 }
4352 }
4353 switch(b) {
4354 case 0x0f: /* 3DNow! data insns */
4355 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4356 goto illegal_op;
4357 val = ldub_code(s->pc++);
4358 sse_op2 = sse_op_table5[val];
4359 if (!sse_op2)
4360 goto illegal_op;
4361 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4362 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4363 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4364 break;
4365 case 0x70: /* pshufx insn */
4366 case 0xc6: /* pshufx insn */
4367 val = ldub_code(s->pc++);
4368 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4369 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4370 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4371 break;
4372 case 0xc2:
4373 /* compare insns */
4374 val = ldub_code(s->pc++);
4375 if (val >= 8)
4376 goto illegal_op;
4377 sse_op2 = sse_op_table4[val][b1];
4378 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4379 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4380 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4381 break;
4382 case 0xf7:
4383 /* maskmov: stores through DS:rDI implicitly, so we must prepare A0 */
4384 if (mod != 3)
4385 goto illegal_op;
4386#ifdef TARGET_X86_64
4387 if (s->aflag == 2) {
4388 gen_op_movq_A0_reg(R_EDI);
4389 } else
4390#endif
4391 {
4392 gen_op_movl_A0_reg(R_EDI);
4393 if (s->aflag == 0)
4394 gen_op_andl_A0_ffff();
4395 }
4396 gen_add_A0_ds_seg(s);
4397
4398 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4399 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4400 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4401 break;
4402 default:
4403 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4404 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4405 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4406 break;
4407 }
4408 if (b == 0x2e || b == 0x2f) {
4409 s->cc_op = CC_OP_EFLAGS;
4410 }
4411 }
4412}
4413
4414#ifdef VBOX
4415/* Checks whether this is an invalid lock sequence. Only a few instructions
4416 can be used together with the lock prefix, and of those only the
4417 forms that write to a memory operand. So, this is kind of annoying
4418 work to do...
4419 The AMD manual lists the following instructions.
4420 ADC
4421 ADD
4422 AND
4423 BTC
4424 BTR
4425 BTS
4426 CMPXCHG
4427 CMPXCHG8B
4428 CMPXCHG16B
4429 DEC
4430 INC
4431 NEG
4432 NOT
4433 OR
4434 SBB
4435 SUB
4436 XADD
4437 XCHG
4438 XOR */
4439static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4440{
4441 target_ulong pc = s->pc;
4442 int modrm, mod, op;
4443
4444 /* X={8,16,32,64} Y={16,32,64} */
4445 switch (b)
4446 {
4447 /* /2: ADC reg/memX, immX */
4448 /* /0: ADD reg/memX, immX */
4449 /* /4: AND reg/memX, immX */
4450 /* /1: OR reg/memX, immX */
4451 /* /3: SBB reg/memX, immX */
4452 /* /5: SUB reg/memX, immX */
4453 /* /6: XOR reg/memX, immX */
4454 case 0x80:
4455 case 0x81:
4456 case 0x83:
4457 modrm = ldub_code(pc++);
4458 op = (modrm >> 3) & 7;
4459 if (op == 7) /* /7: CMP */
4460 break;
4461 mod = (modrm >> 6) & 3;
4462 if (mod == 3) /* register destination */
4463 break;
4464 return false;
4465
4466 case 0x10: /* /r: ADC reg/mem8, reg8 */
4467 case 0x11: /* /r: ADC reg/memY, regY */
4468 case 0x00: /* /r: ADD reg/mem8, reg8 */
4469 case 0x01: /* /r: ADD reg/memY, regY */
4470 case 0x20: /* /r: AND reg/mem8, reg8 */
4471 case 0x21: /* /r: AND reg/memY, regY */
4472 case 0x08: /* /r: OR reg/mem8, reg8 */
4473 case 0x09: /* /r: OR reg/memY, regY */
4474 case 0x18: /* /r: SBB reg/mem8, reg8 */
4475 case 0x19: /* /r: SBB reg/memY, regY */
4476 case 0x28: /* /r: SUB reg/mem8, reg8 */
4477 case 0x29: /* /r: SUB reg/memY, regY */
4478 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4479 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4480 case 0x30: /* /r: XOR reg/mem8, reg8 */
4481 case 0x31: /* /r: XOR reg/memY, regY */
4482 modrm = ldub_code(pc++);
4483 mod = (modrm >> 6) & 3;
4484 if (mod == 3) /* register destination */
4485 break;
4486 return false;
4487
4488 /* /1: DEC reg/memX */
4489 /* /0: INC reg/memX */
4490 case 0xfe:
4491 case 0xff:
4492 modrm = ldub_code(pc++);
4493 mod = (modrm >> 6) & 3;
4494 if (mod == 3) /* register destination */
4495 break;
4496 return false;
4497
4498 /* /3: NEG reg/memX */
4499 /* /2: NOT reg/memX */
4500 case 0xf6:
4501 case 0xf7:
4502 modrm = ldub_code(pc++);
4503 mod = (modrm >> 6) & 3;
4504 if (mod == 3) /* register destination */
4505 break;
4506 return false;
4507
4508 case 0x0f:
4509 b = ldub_code(pc++);
4510 switch (b)
4511 {
4512 /* /7: BTC reg/memY, imm8 */
4513 /* /6: BTR reg/memY, imm8 */
4514 /* /5: BTS reg/memY, imm8 */
4515 case 0xba:
4516 modrm = ldub_code(pc++);
4517 op = (modrm >> 3) & 7;
4518 if (op < 5)
4519 break;
4520 mod = (modrm >> 6) & 3;
4521 if (mod == 3) /* register destination */
4522 break;
4523 return false;
4524
4525 case 0xbb: /* /r: BTC reg/memY, regY */
4526 case 0xb3: /* /r: BTR reg/memY, regY */
4527 case 0xab: /* /r: BTS reg/memY, regY */
4528 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4529 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4530 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4531 case 0xc1: /* /r: XADD reg/memY, regY */
4532 modrm = ldub_code(pc++);
4533 mod = (modrm >> 6) & 3;
4534 if (mod == 3) /* register destination */
4535 break;
4536 return false;
4537
4538 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4539 case 0xc7:
4540 modrm = ldub_code(pc++);
4541 op = (modrm >> 3) & 7;
4542 if (op != 1)
4543 break;
4544 return false;
4545 }
4546 break;
4547 }
4548
4549 /* illegal sequence. The s->pc is past the lock prefix and that
4550 is sufficient for the TB, I think. */
4551 Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
4552 return true;
4553}
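/* Example: "lock inc eax" encodes as f0 ff c0; b = 0xff and
   modrm = 0xc0 give mod == 3 (register destination), so the switch
   breaks out and the sequence is reported as invalid (#UD). The
   memory form "lock inc dword [ebx]" (f0 ff 03) has mod == 0 and
   is accepted. */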
4554#endif /* VBOX */
4555
4556
4557/* convert one instruction. s->is_jmp is set if the translation must
4558 be stopped. Return the next pc value */
4559static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4560{
4561 int b, prefixes, aflag, dflag;
4562 int shift, ot;
4563 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4564 target_ulong next_eip, tval;
4565 int rex_w, rex_r;
4566
4567 if (unlikely(loglevel & CPU_LOG_TB_OP))
4568 tcg_gen_debug_insn_start(pc_start);
4569 s->pc = pc_start;
4570 prefixes = 0;
4571 aflag = s->code32;
4572 dflag = s->code32;
4573 s->override = -1;
4574 rex_w = -1;
4575 rex_r = 0;
4576#ifdef TARGET_X86_64
4577 s->rex_x = 0;
4578 s->rex_b = 0;
4579 x86_64_hregs = 0;
4580#endif
4581 s->rip_offset = 0; /* for relative ip address */
4582#ifdef VBOX
4583 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4584 gen_update_eip(pc_start - s->cs_base);
4585#endif
4586 next_byte:
4587 b = ldub_code(s->pc);
4588 s->pc++;
4589 /* check prefixes */
4590#ifdef TARGET_X86_64
4591 if (CODE64(s)) {
4592 switch (b) {
4593 case 0xf3:
4594 prefixes |= PREFIX_REPZ;
4595 goto next_byte;
4596 case 0xf2:
4597 prefixes |= PREFIX_REPNZ;
4598 goto next_byte;
4599 case 0xf0:
4600 prefixes |= PREFIX_LOCK;
4601 goto next_byte;
4602 case 0x2e:
4603 s->override = R_CS;
4604 goto next_byte;
4605 case 0x36:
4606 s->override = R_SS;
4607 goto next_byte;
4608 case 0x3e:
4609 s->override = R_DS;
4610 goto next_byte;
4611 case 0x26:
4612 s->override = R_ES;
4613 goto next_byte;
4614 case 0x64:
4615 s->override = R_FS;
4616 goto next_byte;
4617 case 0x65:
4618 s->override = R_GS;
4619 goto next_byte;
4620 case 0x66:
4621 prefixes |= PREFIX_DATA;
4622 goto next_byte;
4623 case 0x67:
4624 prefixes |= PREFIX_ADR;
4625 goto next_byte;
4626 case 0x40 ... 0x4f:
4627 /* REX prefix */
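 /* REX is 0100WRXB; the R, X and B bits are shifted up to bit 3
    here so they can later be ORed directly into the ModRM
    reg/index/base register numbers. */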
4628 rex_w = (b >> 3) & 1;
4629 rex_r = (b & 0x4) << 1;
4630 s->rex_x = (b & 0x2) << 2;
4631 REX_B(s) = (b & 0x1) << 3;
4632 x86_64_hregs = 1; /* select uniform byte register addressing */
4633 goto next_byte;
4634 }
4635 if (rex_w == 1) {
4636 /* 0x66 is ignored if rex.w is set */
4637 dflag = 2;
4638 } else {
4639 if (prefixes & PREFIX_DATA)
4640 dflag ^= 1;
4641 }
4642 if (!(prefixes & PREFIX_ADR))
4643 aflag = 2;
4644 } else
4645#endif
4646 {
4647 switch (b) {
4648 case 0xf3:
4649 prefixes |= PREFIX_REPZ;
4650 goto next_byte;
4651 case 0xf2:
4652 prefixes |= PREFIX_REPNZ;
4653 goto next_byte;
4654 case 0xf0:
4655 prefixes |= PREFIX_LOCK;
4656 goto next_byte;
4657 case 0x2e:
4658 s->override = R_CS;
4659 goto next_byte;
4660 case 0x36:
4661 s->override = R_SS;
4662 goto next_byte;
4663 case 0x3e:
4664 s->override = R_DS;
4665 goto next_byte;
4666 case 0x26:
4667 s->override = R_ES;
4668 goto next_byte;
4669 case 0x64:
4670 s->override = R_FS;
4671 goto next_byte;
4672 case 0x65:
4673 s->override = R_GS;
4674 goto next_byte;
4675 case 0x66:
4676 prefixes |= PREFIX_DATA;
4677 goto next_byte;
4678 case 0x67:
4679 prefixes |= PREFIX_ADR;
4680 goto next_byte;
4681 }
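 /* outside long mode, 0x66/0x67 simply toggle between the 16 and
    32-bit default operand/address sizes given by the code segment
    (code32) */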
4682 if (prefixes & PREFIX_DATA)
4683 dflag ^= 1;
4684 if (prefixes & PREFIX_ADR)
4685 aflag ^= 1;
4686 }
4687
4688 s->prefix = prefixes;
4689 s->aflag = aflag;
4690 s->dflag = dflag;
4691
4692 /* lock generation */
4693#ifndef VBOX
4694 if (prefixes & PREFIX_LOCK)
4695 tcg_gen_helper_0_0(helper_lock);
4696#else /* VBOX */
4697 if (prefixes & PREFIX_LOCK) {
4698 if (is_invalid_lock_sequence(s, pc_start, b)) {
4699 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4700 return s->pc;
4701 }
4702 tcg_gen_helper_0_0(helper_lock);
4703 }
4704#endif /* VBOX */
4705
4706 /* now check op code */
4707 reswitch:
4708 switch(b) {
4709 case 0x0f:
4710 /**************************/
4711 /* extended op code */
4712 b = ldub_code(s->pc++) | 0x100;
4713 goto reswitch;
4714
4715 /**************************/
4716 /* arith & logic */
4717 case 0x00 ... 0x05:
4718 case 0x08 ... 0x0d:
4719 case 0x10 ... 0x15:
4720 case 0x18 ... 0x1d:
4721 case 0x20 ... 0x25:
4722 case 0x28 ... 0x2d:
4723 case 0x30 ... 0x35:
4724 case 0x38 ... 0x3d:
4725 {
4726 int op, f, val;
4727 op = (b >> 3) & 7;
4728 f = (b >> 1) & 3;
4729
4730 if ((b & 1) == 0)
4731 ot = OT_BYTE;
4732 else
4733 ot = dflag + OT_WORD;
4734
4735 switch(f) {
4736 case 0: /* OP Ev, Gv */
4737 modrm = ldub_code(s->pc++);
4738 reg = ((modrm >> 3) & 7) | rex_r;
4739 mod = (modrm >> 6) & 3;
4740 rm = (modrm & 7) | REX_B(s);
4741 if (mod != 3) {
4742 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4743 opreg = OR_TMP0;
4744 } else if (op == OP_XORL && rm == reg) {
4745 xor_zero:
4746 /* xor reg, reg optimisation */
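 /* xor r,r always yields zero with statically known flags, so
    store the constant directly instead of going through gen_op() */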
4747 gen_op_movl_T0_0();
4748 s->cc_op = CC_OP_LOGICB + ot;
4749 gen_op_mov_reg_T0(ot, reg);
4750 gen_op_update1_cc();
4751 break;
4752 } else {
4753 opreg = rm;
4754 }
4755 gen_op_mov_TN_reg(ot, 1, reg);
4756 gen_op(s, op, ot, opreg);
4757 break;
4758 case 1: /* OP Gv, Ev */
4759 modrm = ldub_code(s->pc++);
4760 mod = (modrm >> 6) & 3;
4761 reg = ((modrm >> 3) & 7) | rex_r;
4762 rm = (modrm & 7) | REX_B(s);
4763 if (mod != 3) {
4764 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4765 gen_op_ld_T1_A0(ot + s->mem_index);
4766 } else if (op == OP_XORL && rm == reg) {
4767 goto xor_zero;
4768 } else {
4769 gen_op_mov_TN_reg(ot, 1, rm);
4770 }
4771 gen_op(s, op, ot, reg);
4772 break;
4773 case 2: /* OP A, Iv */
4774 val = insn_get(s, ot);
4775 gen_op_movl_T1_im(val);
4776 gen_op(s, op, ot, OR_EAX);
4777 break;
4778 }
4779 }
4780 break;
4781
4782 case 0x82:
4783 if (CODE64(s))
4784 goto illegal_op;
4785 case 0x80: /* GRP1 */
4786 case 0x81:
4787 case 0x83:
4788 {
4789 int val;
4790
4791 if ((b & 1) == 0)
4792 ot = OT_BYTE;
4793 else
4794 ot = dflag + OT_WORD;
4795
4796 modrm = ldub_code(s->pc++);
4797 mod = (modrm >> 6) & 3;
4798 rm = (modrm & 7) | REX_B(s);
4799 op = (modrm >> 3) & 7;
4800
4801 if (mod != 3) {
4802 if (b == 0x83)
4803 s->rip_offset = 1;
4804 else
4805 s->rip_offset = insn_const_size(ot);
4806 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4807 opreg = OR_TMP0;
4808 } else {
4809 opreg = rm;
4810 }
4811
4812 switch(b) {
4813 default:
4814 case 0x80:
4815 case 0x81:
4816 case 0x82:
4817 val = insn_get(s, ot);
4818 break;
4819 case 0x83:
4820 val = (int8_t)insn_get(s, OT_BYTE);
4821 break;
4822 }
4823 gen_op_movl_T1_im(val);
4824 gen_op(s, op, ot, opreg);
4825 }
4826 break;
4827
4828 /**************************/
4829 /* inc, dec, and other misc arith */
4830 case 0x40 ... 0x47: /* inc Gv */
4831 ot = dflag ? OT_LONG : OT_WORD;
4832 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4833 break;
4834 case 0x48 ... 0x4f: /* dec Gv */
4835 ot = dflag ? OT_LONG : OT_WORD;
4836 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4837 break;
4838 case 0xf6: /* GRP3 */
4839 case 0xf7:
4840 if ((b & 1) == 0)
4841 ot = OT_BYTE;
4842 else
4843 ot = dflag + OT_WORD;
4844
4845 modrm = ldub_code(s->pc++);
4846 mod = (modrm >> 6) & 3;
4847 rm = (modrm & 7) | REX_B(s);
4848 op = (modrm >> 3) & 7;
4849 if (mod != 3) {
4850 if (op == 0)
4851 s->rip_offset = insn_const_size(ot);
4852 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4853 gen_op_ld_T0_A0(ot + s->mem_index);
4854 } else {
4855 gen_op_mov_TN_reg(ot, 0, rm);
4856 }
4857
4858 switch(op) {
4859 case 0: /* test */
4860 val = insn_get(s, ot);
4861 gen_op_movl_T1_im(val);
4862 gen_op_testl_T0_T1_cc();
4863 s->cc_op = CC_OP_LOGICB + ot;
4864 break;
4865 case 2: /* not */
4866 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4867 if (mod != 3) {
4868 gen_op_st_T0_A0(ot + s->mem_index);
4869 } else {
4870 gen_op_mov_reg_T0(ot, rm);
4871 }
4872 break;
4873 case 3: /* neg */
4874 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4875 if (mod != 3) {
4876 gen_op_st_T0_A0(ot + s->mem_index);
4877 } else {
4878 gen_op_mov_reg_T0(ot, rm);
4879 }
4880 gen_op_update_neg_cc();
4881 s->cc_op = CC_OP_SUBB + ot;
4882 break;
4883 case 4: /* mul */
4884 switch(ot) {
4885 case OT_BYTE:
4886 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4887 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4888 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4889 /* XXX: use 32 bit mul which could be faster */
4890 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4891 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4892 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4893 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4894 s->cc_op = CC_OP_MULB;
4895 break;
4896 case OT_WORD:
4897 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4898 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4899 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4900 /* XXX: use 32 bit mul which could be faster */
4901 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4902 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4903 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4904 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4905 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4906 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4907 s->cc_op = CC_OP_MULW;
4908 break;
4909 default:
4910 case OT_LONG:
4911#ifdef TARGET_X86_64
4912 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4913 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4914 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4915 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4916 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4917 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4918 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4919 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4920 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4921#else
4922 {
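 /* without a 64-bit target_ulong, widen both factors to i64,
    multiply, and split the product into EAX (low) / EDX (high) */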
4923 TCGv t0, t1;
4924 t0 = tcg_temp_new(TCG_TYPE_I64);
4925 t1 = tcg_temp_new(TCG_TYPE_I64);
4926 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4927 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4928 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4929 tcg_gen_mul_i64(t0, t0, t1);
4930 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4931 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4932 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4933 tcg_gen_shri_i64(t0, t0, 32);
4934 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4935 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4936 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4937 }
4938#endif
4939 s->cc_op = CC_OP_MULL;
4940 break;
4941#ifdef TARGET_X86_64
4942 case OT_QUAD:
4943 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4944 s->cc_op = CC_OP_MULQ;
4945 break;
4946#endif
4947 }
4948 break;
4949 case 5: /* imul */
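 /* for imul, CF/OF must be set when the product does not fit the
    destination; cc_src is computed as result - sext(low half),
    which is nonzero exactly in that case */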
4950 switch(ot) {
4951 case OT_BYTE:
4952 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4953 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4954 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4955 /* XXX: use 32 bit mul which could be faster */
4956 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4957 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4958 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4959 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4960 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4961 s->cc_op = CC_OP_MULB;
4962 break;
4963 case OT_WORD:
4964 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4965 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4966 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4967 /* XXX: use 32 bit mul which could be faster */
4968 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4969 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4970 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4971 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4972 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4973 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4974 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4975 s->cc_op = CC_OP_MULW;
4976 break;
4977 default:
4978 case OT_LONG:
4979#ifdef TARGET_X86_64
4980 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4981 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4982 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4983 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4984 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4985 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4986 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4987 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4988 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4989 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4990#else
4991 {
4992 TCGv t0, t1;
4993 t0 = tcg_temp_new(TCG_TYPE_I64);
4994 t1 = tcg_temp_new(TCG_TYPE_I64);
4995 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4996 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4997 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4998 tcg_gen_mul_i64(t0, t0, t1);
4999 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5000 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5001 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5002 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5003 tcg_gen_shri_i64(t0, t0, 32);
5004 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5005 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5006 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5007 }
5008#endif
5009 s->cc_op = CC_OP_MULL;
5010 break;
5011#ifdef TARGET_X86_64
5012 case OT_QUAD:
5013 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5014 s->cc_op = CC_OP_MULQ;
5015 break;
5016#endif
5017 }
5018 break;
5019 case 6: /* div */
5020 switch(ot) {
5021 case OT_BYTE:
5022 gen_jmp_im(pc_start - s->cs_base);
5023 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5024 break;
5025 case OT_WORD:
5026 gen_jmp_im(pc_start - s->cs_base);
5027 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5028 break;
5029 default:
5030 case OT_LONG:
5031 gen_jmp_im(pc_start - s->cs_base);
5032 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5033 break;
5034#ifdef TARGET_X86_64
5035 case OT_QUAD:
5036 gen_jmp_im(pc_start - s->cs_base);
5037 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5038 break;
5039#endif
5040 }
5041 break;
5042 case 7: /* idiv */
5043 switch(ot) {
5044 case OT_BYTE:
5045 gen_jmp_im(pc_start - s->cs_base);
5046 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5047 break;
5048 case OT_WORD:
5049 gen_jmp_im(pc_start - s->cs_base);
5050 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5051 break;
5052 default:
5053 case OT_LONG:
5054 gen_jmp_im(pc_start - s->cs_base);
5055 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5056 break;
5057#ifdef TARGET_X86_64
5058 case OT_QUAD:
5059 gen_jmp_im(pc_start - s->cs_base);
5060 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5061 break;
5062#endif
5063 }
5064 break;
5065 default:
5066 goto illegal_op;
5067 }
5068 break;
5069
5070 case 0xfe: /* GRP4 */
5071 case 0xff: /* GRP5 */
5072 if ((b & 1) == 0)
5073 ot = OT_BYTE;
5074 else
5075 ot = dflag + OT_WORD;
5076
5077 modrm = ldub_code(s->pc++);
5078 mod = (modrm >> 6) & 3;
5079 rm = (modrm & 7) | REX_B(s);
5080 op = (modrm >> 3) & 7;
5081 if (op >= 2 && b == 0xfe) {
5082 goto illegal_op;
5083 }
5084 if (CODE64(s)) {
5085 if (op == 2 || op == 4) {
5086 /* operand size for jumps is 64 bit */
5087 ot = OT_QUAD;
5088 } else if (op == 3 || op == 5) {
5089 /* for far calls and jumps (lcall/ljmp), the operand is
5090 16 or 32 bit, even in long mode */
5091 ot = dflag ? OT_LONG : OT_WORD;
5092 } else if (op == 6) {
5093 /* default push size is 64 bit */
5094 ot = dflag ? OT_QUAD : OT_WORD;
5095 }
5096 }
5097 if (mod != 3) {
5098 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5099 if (op >= 2 && op != 3 && op != 5)
5100 gen_op_ld_T0_A0(ot + s->mem_index);
5101 } else {
5102 gen_op_mov_TN_reg(ot, 0, rm);
5103 }
5104
5105 switch(op) {
5106 case 0: /* inc Ev */
5107 if (mod != 3)
5108 opreg = OR_TMP0;
5109 else
5110 opreg = rm;
5111 gen_inc(s, ot, opreg, 1);
5112 break;
5113 case 1: /* dec Ev */
5114 if (mod != 3)
5115 opreg = OR_TMP0;
5116 else
5117 opreg = rm;
5118 gen_inc(s, ot, opreg, -1);
5119 break;
5120 case 2: /* call Ev */
5121 /* XXX: optimize if memory (no 'and' is necessary) */
5122#ifdef VBOX_WITH_CALL_RECORD
5123 if (s->record_call)
5124 gen_op_record_call();
5125#endif
5126 if (s->dflag == 0)
5127 gen_op_andl_T0_ffff();
5128 next_eip = s->pc - s->cs_base;
5129 gen_movtl_T1_im(next_eip);
5130 gen_push_T1(s);
5131 gen_op_jmp_T0();
5132 gen_eob(s);
5133 break;
5134 case 3: /* lcall Ev */
5135 gen_op_ld_T1_A0(ot + s->mem_index);
5136 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5137 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5138 do_lcall:
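 /* protected mode far calls may cross privilege levels or go
    through a call gate, so sync cc/eip and let the helper do the
    checks; real and vm86 mode just push CS:IP and jump */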
5139 if (s->pe && !s->vm86) {
5140 if (s->cc_op != CC_OP_DYNAMIC)
5141 gen_op_set_cc_op(s->cc_op);
5142 gen_jmp_im(pc_start - s->cs_base);
5143 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5144 tcg_gen_helper_0_4(helper_lcall_protected,
5145 cpu_tmp2_i32, cpu_T[1],
5146 tcg_const_i32(dflag),
5147 tcg_const_i32(s->pc - pc_start));
5148 } else {
5149 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5150 tcg_gen_helper_0_4(helper_lcall_real,
5151 cpu_tmp2_i32, cpu_T[1],
5152 tcg_const_i32(dflag),
5153 tcg_const_i32(s->pc - s->cs_base));
5154 }
5155 gen_eob(s);
5156 break;
5157 case 4: /* jmp Ev */
5158 if (s->dflag == 0)
5159 gen_op_andl_T0_ffff();
5160 gen_op_jmp_T0();
5161 gen_eob(s);
5162 break;
5163 case 5: /* ljmp Ev */
5164 gen_op_ld_T1_A0(ot + s->mem_index);
5165 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5166 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5167 do_ljmp:
5168 if (s->pe && !s->vm86) {
5169 if (s->cc_op != CC_OP_DYNAMIC)
5170 gen_op_set_cc_op(s->cc_op);
5171 gen_jmp_im(pc_start - s->cs_base);
5172 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5173 tcg_gen_helper_0_3(helper_ljmp_protected,
5174 cpu_tmp2_i32,
5175 cpu_T[1],
5176 tcg_const_i32(s->pc - pc_start));
5177 } else {
5178 gen_op_movl_seg_T0_vm(R_CS);
5179 gen_op_movl_T0_T1();
5180 gen_op_jmp_T0();
5181 }
5182 gen_eob(s);
5183 break;
5184 case 6: /* push Ev */
5185 gen_push_T0(s);
5186 break;
5187 default:
5188 goto illegal_op;
5189 }
5190 break;
5191
5192 case 0x84: /* test Ev, Gv */
5193 case 0x85:
5194 if ((b & 1) == 0)
5195 ot = OT_BYTE;
5196 else
5197 ot = dflag + OT_WORD;
5198
5199 modrm = ldub_code(s->pc++);
5200 mod = (modrm >> 6) & 3;
5201 rm = (modrm & 7) | REX_B(s);
5202 reg = ((modrm >> 3) & 7) | rex_r;
5203
5204 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5205 gen_op_mov_TN_reg(ot, 1, reg);
5206 gen_op_testl_T0_T1_cc();
5207 s->cc_op = CC_OP_LOGICB + ot;
5208 break;
5209
5210 case 0xa8: /* test eAX, Iv */
5211 case 0xa9:
5212 if ((b & 1) == 0)
5213 ot = OT_BYTE;
5214 else
5215 ot = dflag + OT_WORD;
5216 val = insn_get(s, ot);
5217
5218 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5219 gen_op_movl_T1_im(val);
5220 gen_op_testl_T0_T1_cc();
5221 s->cc_op = CC_OP_LOGICB + ot;
5222 break;
5223
5224 case 0x98: /* CWDE/CBW */
5225#ifdef TARGET_X86_64
5226 if (dflag == 2) {
5227 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5228 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5229 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5230 } else
5231#endif
5232 if (dflag == 1) {
5233 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5234 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5235 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5236 } else {
5237 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5238 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5239 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5240 }
5241 break;
5242 case 0x99: /* CDQ/CWD */
5243#ifdef TARGET_X86_64
5244 if (dflag == 2) {
5245 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5246 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5247 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5248 } else
5249#endif
5250 if (dflag == 1) {
5251 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5252 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5253 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5254 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5255 } else {
5256 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5257 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5258 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5259 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5260 }
5261 break;
5262 case 0x1af: /* imul Gv, Ev */
5263 case 0x69: /* imul Gv, Ev, I */
5264 case 0x6b:
5265 ot = dflag + OT_WORD;
5266 modrm = ldub_code(s->pc++);
5267 reg = ((modrm >> 3) & 7) | rex_r;
5268 if (b == 0x69)
5269 s->rip_offset = insn_const_size(ot);
5270 else if (b == 0x6b)
5271 s->rip_offset = 1;
5272 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5273 if (b == 0x69) {
5274 val = insn_get(s, ot);
5275 gen_op_movl_T1_im(val);
5276 } else if (b == 0x6b) {
5277 val = (int8_t)insn_get(s, OT_BYTE);
5278 gen_op_movl_T1_im(val);
5279 } else {
5280 gen_op_mov_TN_reg(ot, 1, reg);
5281 }
5282
5283#ifdef TARGET_X86_64
5284 if (ot == OT_QUAD) {
5285 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5286 } else
5287#endif
5288 if (ot == OT_LONG) {
5289#ifdef TARGET_X86_64
5290 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5291 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5292 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5293 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5294 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5295 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5296#else
5297 {
5298 TCGv t0, t1;
5299 t0 = tcg_temp_new(TCG_TYPE_I64);
5300 t1 = tcg_temp_new(TCG_TYPE_I64);
5301 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5302 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5303 tcg_gen_mul_i64(t0, t0, t1);
5304 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5305 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5306 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5307 tcg_gen_shri_i64(t0, t0, 32);
5308 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5309 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5310 }
5311#endif
5312 } else {
5313 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5314 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5315 /* XXX: use 32 bit mul which could be faster */
5316 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5317 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5318 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5319 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5320 }
5321 gen_op_mov_reg_T0(ot, reg);
5322 s->cc_op = CC_OP_MULB + ot;
5323 break;
5324 case 0x1c0:
5325 case 0x1c1: /* xadd Ev, Gv */
5326 if ((b & 1) == 0)
5327 ot = OT_BYTE;
5328 else
5329 ot = dflag + OT_WORD;
5330 modrm = ldub_code(s->pc++);
5331 reg = ((modrm >> 3) & 7) | rex_r;
5332 mod = (modrm >> 6) & 3;
5333 if (mod == 3) {
5334 rm = (modrm & 7) | REX_B(s);
5335 gen_op_mov_TN_reg(ot, 0, reg);
5336 gen_op_mov_TN_reg(ot, 1, rm);
5337 gen_op_addl_T0_T1();
5338 gen_op_mov_reg_T1(ot, reg);
5339 gen_op_mov_reg_T0(ot, rm);
5340 } else {
5341 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5342 gen_op_mov_TN_reg(ot, 0, reg);
5343 gen_op_ld_T1_A0(ot + s->mem_index);
5344 gen_op_addl_T0_T1();
5345 gen_op_st_T0_A0(ot + s->mem_index);
5346 gen_op_mov_reg_T1(ot, reg);
5347 }
5348 gen_op_update2_cc();
5349 s->cc_op = CC_OP_ADDB + ot;
5350 break;
5351 case 0x1b0:
5352 case 0x1b1: /* cmpxchg Ev, Gv */
5353 {
5354 int label1, label2;
5355 TCGv t0, t1, t2, a0;
5356
5357 if ((b & 1) == 0)
5358 ot = OT_BYTE;
5359 else
5360 ot = dflag + OT_WORD;
5361 modrm = ldub_code(s->pc++);
5362 reg = ((modrm >> 3) & 7) | rex_r;
5363 mod = (modrm >> 6) & 3;
5364 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5365 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5366 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5367 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5368 gen_op_mov_v_reg(ot, t1, reg);
5369 if (mod == 3) {
5370 rm = (modrm & 7) | REX_B(s);
5371 gen_op_mov_v_reg(ot, t0, rm);
5372 } else {
5373 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5374 tcg_gen_mov_tl(a0, cpu_A0);
5375 gen_op_ld_v(ot + s->mem_index, t0, a0);
5376 rm = 0; /* avoid warning */
5377 }
5378 label1 = gen_new_label();
5379 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5380 tcg_gen_sub_tl(t2, t2, t0);
5381 gen_extu(ot, t2);
5382 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5383 if (mod == 3) {
5384 label2 = gen_new_label();
5385 gen_op_mov_reg_v(ot, R_EAX, t0);
5386 tcg_gen_br(label2);
5387 gen_set_label(label1);
5388 gen_op_mov_reg_v(ot, rm, t1);
5389 gen_set_label(label2);
5390 } else {
5391 tcg_gen_mov_tl(t1, t0);
5392 gen_op_mov_reg_v(ot, R_EAX, t0);
5393 gen_set_label(label1);
5394 /* always store */
5395 gen_op_st_v(ot + s->mem_index, t1, a0);
5396 }
5397 tcg_gen_mov_tl(cpu_cc_src, t0);
5398 tcg_gen_mov_tl(cpu_cc_dst, t2);
5399 s->cc_op = CC_OP_SUBB + ot;
5400 tcg_temp_free(t0);
5401 tcg_temp_free(t1);
5402 tcg_temp_free(t2);
5403 tcg_temp_free(a0);
5404 }
5405 break;
5406 case 0x1c7: /* cmpxchg8b */
5407 modrm = ldub_code(s->pc++);
5408 mod = (modrm >> 6) & 3;
5409 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5410 goto illegal_op;
5411#ifdef TARGET_X86_64
5412 if (dflag == 2) {
5413 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5414 goto illegal_op;
5415 gen_jmp_im(pc_start - s->cs_base);
5416 if (s->cc_op != CC_OP_DYNAMIC)
5417 gen_op_set_cc_op(s->cc_op);
5418 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5419 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5420 } else
5421#endif
5422 {
5423 if (!(s->cpuid_features & CPUID_CX8))
5424 goto illegal_op;
5425 gen_jmp_im(pc_start - s->cs_base);
5426 if (s->cc_op != CC_OP_DYNAMIC)
5427 gen_op_set_cc_op(s->cc_op);
5428 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5429 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5430 }
5431 s->cc_op = CC_OP_EFLAGS;
5432 break;
5433
5434 /**************************/
5435 /* push/pop */
5436 case 0x50 ... 0x57: /* push */
5437 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5438 gen_push_T0(s);
5439 break;
5440 case 0x58 ... 0x5f: /* pop */
5441 if (CODE64(s)) {
5442 ot = dflag ? OT_QUAD : OT_WORD;
5443 } else {
5444 ot = dflag + OT_WORD;
5445 }
5446 gen_pop_T0(s);
5447 /* NOTE: order is important for pop %sp */
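 /* ESP is incremented before the register write so that pop %sp
    ends up with the popped value, not the adjusted one */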
5448 gen_pop_update(s);
5449 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5450 break;
5451 case 0x60: /* pusha */
5452 if (CODE64(s))
5453 goto illegal_op;
5454 gen_pusha(s);
5455 break;
5456 case 0x61: /* popa */
5457 if (CODE64(s))
5458 goto illegal_op;
5459 gen_popa(s);
5460 break;
5461 case 0x68: /* push Iv */
5462 case 0x6a:
5463 if (CODE64(s)) {
5464 ot = dflag ? OT_QUAD : OT_WORD;
5465 } else {
5466 ot = dflag + OT_WORD;
5467 }
5468 if (b == 0x68)
5469 val = insn_get(s, ot);
5470 else
5471 val = (int8_t)insn_get(s, OT_BYTE);
5472 gen_op_movl_T0_im(val);
5473 gen_push_T0(s);
5474 break;
5475 case 0x8f: /* pop Ev */
5476 if (CODE64(s)) {
5477 ot = dflag ? OT_QUAD : OT_WORD;
5478 } else {
5479 ot = dflag + OT_WORD;
5480 }
5481 modrm = ldub_code(s->pc++);
5482 mod = (modrm >> 6) & 3;
5483 gen_pop_T0(s);
5484 if (mod == 3) {
5485 /* NOTE: order is important for pop %sp */
5486 gen_pop_update(s);
5487 rm = (modrm & 7) | REX_B(s);
5488 gen_op_mov_reg_T0(ot, rm);
5489 } else {
5490 /* NOTE: order is important too for MMU exceptions */
5491 s->popl_esp_hack = 1 << ot;
5492 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5493 s->popl_esp_hack = 0;
5494 gen_pop_update(s);
5495 }
5496 break;
5497 case 0xc8: /* enter */
5498 {
5499 int level;
5500 val = lduw_code(s->pc);
5501 s->pc += 2;
5502 level = ldub_code(s->pc++);
5503 gen_enter(s, val, level);
5504 }
5505 break;
5506 case 0xc9: /* leave */
5507 /* XXX: exception not precise (ESP is updated before potential exception) */
5508 if (CODE64(s)) {
5509 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5510 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5511 } else if (s->ss32) {
5512 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5513 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5514 } else {
5515 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5516 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5517 }
5518 gen_pop_T0(s);
5519 if (CODE64(s)) {
5520 ot = dflag ? OT_QUAD : OT_WORD;
5521 } else {
5522 ot = dflag + OT_WORD;
5523 }
5524 gen_op_mov_reg_T0(ot, R_EBP);
5525 gen_pop_update(s);
5526 break;
5527 case 0x06: /* push es */
5528 case 0x0e: /* push cs */
5529 case 0x16: /* push ss */
5530 case 0x1e: /* push ds */
5531 if (CODE64(s))
5532 goto illegal_op;
5533 gen_op_movl_T0_seg(b >> 3);
5534 gen_push_T0(s);
5535 break;
5536 case 0x1a0: /* push fs */
5537 case 0x1a8: /* push gs */
5538 gen_op_movl_T0_seg((b >> 3) & 7);
5539 gen_push_T0(s);
5540 break;
5541 case 0x07: /* pop es */
5542 case 0x17: /* pop ss */
5543 case 0x1f: /* pop ds */
5544 if (CODE64(s))
5545 goto illegal_op;
5546 reg = b >> 3;
5547 gen_pop_T0(s);
5548 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5549 gen_pop_update(s);
5550 if (reg == R_SS) {
5551 /* if reg == SS, inhibit interrupts/trace. */
5552 /* If several instructions disable interrupts, only the
5553 _first_ does it */
5554 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5555 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5556 s->tf = 0;
5557 }
5558 if (s->is_jmp) {
5559 gen_jmp_im(s->pc - s->cs_base);
5560 gen_eob(s);
5561 }
5562 break;
5563 case 0x1a1: /* pop fs */
5564 case 0x1a9: /* pop gs */
5565 gen_pop_T0(s);
5566 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5567 gen_pop_update(s);
5568 if (s->is_jmp) {
5569 gen_jmp_im(s->pc - s->cs_base);
5570 gen_eob(s);
5571 }
5572 break;
5573
5574 /**************************/
5575 /* mov */
5576 case 0x88:
5577 case 0x89: /* mov Gv, Ev */
5578 if ((b & 1) == 0)
5579 ot = OT_BYTE;
5580 else
5581 ot = dflag + OT_WORD;
5582 modrm = ldub_code(s->pc++);
5583 reg = ((modrm >> 3) & 7) | rex_r;
5584
5585 /* generate a generic store */
5586 gen_ldst_modrm(s, modrm, ot, reg, 1);
5587 break;
5588 case 0xc6:
5589 case 0xc7: /* mov Ev, Iv */
5590 if ((b & 1) == 0)
5591 ot = OT_BYTE;
5592 else
5593 ot = dflag + OT_WORD;
5594 modrm = ldub_code(s->pc++);
5595 mod = (modrm >> 6) & 3;
5596 if (mod != 3) {
5597 s->rip_offset = insn_const_size(ot);
5598 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5599 }
5600 val = insn_get(s, ot);
5601 gen_op_movl_T0_im(val);
5602 if (mod != 3)
5603 gen_op_st_T0_A0(ot + s->mem_index);
5604 else
5605 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5606 break;
5607 case 0x8a:
5608 case 0x8b: /* mov Ev, Gv */
5609#ifdef VBOX /* dtrace hot fix */
5610 if (prefixes & PREFIX_LOCK)
5611 goto illegal_op;
5612#endif
5613 if ((b & 1) == 0)
5614 ot = OT_BYTE;
5615 else
5616 ot = OT_WORD + dflag;
5617 modrm = ldub_code(s->pc++);
5618 reg = ((modrm >> 3) & 7) | rex_r;
5619
5620 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5621 gen_op_mov_reg_T0(ot, reg);
5622 break;
5623 case 0x8e: /* mov seg, Gv */
5624 modrm = ldub_code(s->pc++);
5625 reg = (modrm >> 3) & 7;
5626 if (reg >= 6 || reg == R_CS)
5627 goto illegal_op;
5628 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5629 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5630 if (reg == R_SS) {
5631 /* if reg == SS, inhibit interrupts/trace */
5632 /* If several instructions disable interrupts, only the
5633 _first_ does it */
5634 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5635 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5636 s->tf = 0;
5637 }
5638 if (s->is_jmp) {
5639 gen_jmp_im(s->pc - s->cs_base);
5640 gen_eob(s);
5641 }
5642 break;
5643 case 0x8c: /* mov Gv, seg */
5644 modrm = ldub_code(s->pc++);
5645 reg = (modrm >> 3) & 7;
5646 mod = (modrm >> 6) & 3;
5647 if (reg >= 6)
5648 goto illegal_op;
5649 gen_op_movl_T0_seg(reg);
5650 if (mod == 3)
5651 ot = OT_WORD + dflag;
5652 else
5653 ot = OT_WORD;
5654 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5655 break;
5656
5657 case 0x1b6: /* movzbS Gv, Eb */
5658 case 0x1b7: /* movzwS Gv, Ew */
5659 case 0x1be: /* movsbS Gv, Eb */
5660 case 0x1bf: /* movswS Gv, Ew */
5661 {
5662 int d_ot;
5663 /* d_ot is the size of destination */
5664 d_ot = dflag + OT_WORD;
5665 /* ot is the size of source */
5666 ot = (b & 1) + OT_BYTE;
5667 modrm = ldub_code(s->pc++);
5668 reg = ((modrm >> 3) & 7) | rex_r;
5669 mod = (modrm >> 6) & 3;
5670 rm = (modrm & 7) | REX_B(s);
5671
5672 if (mod == 3) {
5673 gen_op_mov_TN_reg(ot, 0, rm);
5674 switch(ot | (b & 8)) {
5675 case OT_BYTE:
5676 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5677 break;
5678 case OT_BYTE | 8:
5679 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5680 break;
5681 case OT_WORD:
5682 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5683 break;
5684 default:
5685 case OT_WORD | 8:
5686 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5687 break;
5688 }
5689 gen_op_mov_reg_T0(d_ot, reg);
5690 } else {
5691 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5692 if (b & 8) {
5693 gen_op_lds_T0_A0(ot + s->mem_index);
5694 } else {
5695 gen_op_ldu_T0_A0(ot + s->mem_index);
5696 }
5697 gen_op_mov_reg_T0(d_ot, reg);
5698 }
5699 }
5700 break;
5701
5702 case 0x8d: /* lea */
5703 ot = dflag + OT_WORD;
5704 modrm = ldub_code(s->pc++);
5705 mod = (modrm >> 6) & 3;
5706 if (mod == 3)
5707 goto illegal_op;
5708 reg = ((modrm >> 3) & 7) | rex_r;
5709 /* we must ensure that no segment base is added */
5710 s->override = -1;
5711 val = s->addseg;
5712 s->addseg = 0;
5713 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5714 s->addseg = val;
5715 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5716 break;
5717
5718 case 0xa0: /* mov EAX, Ov */
5719 case 0xa1:
5720 case 0xa2: /* mov Ov, EAX */
5721 case 0xa3:
5722 {
5723 target_ulong offset_addr;
5724
5725 if ((b & 1) == 0)
5726 ot = OT_BYTE;
5727 else
5728 ot = dflag + OT_WORD;
5729#ifdef TARGET_X86_64
5730 if (s->aflag == 2) {
5731 offset_addr = ldq_code(s->pc);
5732 s->pc += 8;
5733 gen_op_movq_A0_im(offset_addr);
5734 } else
5735#endif
5736 {
5737 if (s->aflag) {
5738 offset_addr = insn_get(s, OT_LONG);
5739 } else {
5740 offset_addr = insn_get(s, OT_WORD);
5741 }
5742 gen_op_movl_A0_im(offset_addr);
5743 }
5744 gen_add_A0_ds_seg(s);
5745 if ((b & 2) == 0) {
5746 gen_op_ld_T0_A0(ot + s->mem_index);
5747 gen_op_mov_reg_T0(ot, R_EAX);
5748 } else {
5749 gen_op_mov_TN_reg(ot, 0, R_EAX);
5750 gen_op_st_T0_A0(ot + s->mem_index);
5751 }
5752 }
5753 break;
5754 case 0xd7: /* xlat */
5755#ifdef TARGET_X86_64
5756 if (s->aflag == 2) {
5757 gen_op_movq_A0_reg(R_EBX);
5758 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5759 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5760 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5761 } else
5762#endif
5763 {
5764 gen_op_movl_A0_reg(R_EBX);
5765 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5766 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5767 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5768 if (s->aflag == 0)
5769 gen_op_andl_A0_ffff();
5770 else
5771 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5772 }
5773 gen_add_A0_ds_seg(s);
5774 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5775 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5776 break;
5777 case 0xb0 ... 0xb7: /* mov R, Ib */
5778 val = insn_get(s, OT_BYTE);
5779 gen_op_movl_T0_im(val);
5780 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5781 break;
5782 case 0xb8 ... 0xbf: /* mov R, Iv */
5783#ifdef TARGET_X86_64
5784 if (dflag == 2) {
5785 uint64_t tmp;
5786 /* 64 bit case */
5787 tmp = ldq_code(s->pc);
5788 s->pc += 8;
5789 reg = (b & 7) | REX_B(s);
5790 gen_movtl_T0_im(tmp);
5791 gen_op_mov_reg_T0(OT_QUAD, reg);
5792 } else
5793#endif
5794 {
5795 ot = dflag ? OT_LONG : OT_WORD;
5796 val = insn_get(s, ot);
5797 reg = (b & 7) | REX_B(s);
5798 gen_op_movl_T0_im(val);
5799 gen_op_mov_reg_T0(ot, reg);
5800 }
5801 break;
5802
5803 case 0x91 ... 0x97: /* xchg R, EAX */
5804 ot = dflag + OT_WORD;
5805 reg = (b & 7) | REX_B(s);
5806 rm = R_EAX;
5807 goto do_xchg_reg;
5808 case 0x86:
5809 case 0x87: /* xchg Ev, Gv */
5810 if ((b & 1) == 0)
5811 ot = OT_BYTE;
5812 else
5813 ot = dflag + OT_WORD;
5814 modrm = ldub_code(s->pc++);
5815 reg = ((modrm >> 3) & 7) | rex_r;
5816 mod = (modrm >> 6) & 3;
5817 if (mod == 3) {
5818 rm = (modrm & 7) | REX_B(s);
5819 do_xchg_reg:
5820 gen_op_mov_TN_reg(ot, 0, reg);
5821 gen_op_mov_TN_reg(ot, 1, rm);
5822 gen_op_mov_reg_T0(ot, rm);
5823 gen_op_mov_reg_T1(ot, reg);
5824 } else {
5825 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5826 gen_op_mov_TN_reg(ot, 0, reg);
5827 /* for xchg, lock is implicit */
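 /* helper_lock was already emitted by the prefix handling when an
    explicit LOCK prefix is present, so only lock here without it */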
5828 if (!(prefixes & PREFIX_LOCK))
5829 tcg_gen_helper_0_0(helper_lock);
5830 gen_op_ld_T1_A0(ot + s->mem_index);
5831 gen_op_st_T0_A0(ot + s->mem_index);
5832 if (!(prefixes & PREFIX_LOCK))
5833 tcg_gen_helper_0_0(helper_unlock);
5834 gen_op_mov_reg_T1(ot, reg);
5835 }
5836 break;
5837 case 0xc4: /* les Gv */
5838 if (CODE64(s))
5839 goto illegal_op;
5840 op = R_ES;
5841 goto do_lxx;
5842 case 0xc5: /* lds Gv */
5843 if (CODE64(s))
5844 goto illegal_op;
5845 op = R_DS;
5846 goto do_lxx;
5847 case 0x1b2: /* lss Gv */
5848 op = R_SS;
5849 goto do_lxx;
5850 case 0x1b4: /* lfs Gv */
5851 op = R_FS;
5852 goto do_lxx;
5853 case 0x1b5: /* lgs Gv */
5854 op = R_GS;
5855 do_lxx:
5856 ot = dflag ? OT_LONG : OT_WORD;
5857 modrm = ldub_code(s->pc++);
5858 reg = ((modrm >> 3) & 7) | rex_r;
5859 mod = (modrm >> 6) & 3;
5860 if (mod == 3)
5861 goto illegal_op;
5862 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5863 gen_op_ld_T1_A0(ot + s->mem_index);
5864 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5865 /* load the segment first to handle exceptions properly */
5866 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5867 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5868 /* then put the data */
5869 gen_op_mov_reg_T1(ot, reg);
5870 if (s->is_jmp) {
5871 gen_jmp_im(s->pc - s->cs_base);
5872 gen_eob(s);
5873 }
5874 break;
5875
5876 /************************/
5877 /* shifts */
5878 case 0xc0:
5879 case 0xc1:
5880 /* shift Ev,Ib */
5881 shift = 2;
5882 grp2:
5883 {
5884 if ((b & 1) == 0)
5885 ot = OT_BYTE;
5886 else
5887 ot = dflag + OT_WORD;
5888
5889 modrm = ldub_code(s->pc++);
5890 mod = (modrm >> 6) & 3;
5891 op = (modrm >> 3) & 7;
5892
5893 if (mod != 3) {
5894 if (shift == 2) {
5895 s->rip_offset = 1;
5896 }
5897 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5898 opreg = OR_TMP0;
5899 } else {
5900 opreg = (modrm & 7) | REX_B(s);
5901 }
5902
5903 /* shift == 0: count in CL (simpler op); otherwise an immediate count */
5904 if (shift == 0) {
5905 gen_shift(s, op, ot, opreg, OR_ECX);
5906 } else {
5907 if (shift == 2) {
5908 shift = ldub_code(s->pc++);
5909 }
5910 gen_shifti(s, op, ot, opreg, shift);
5911 }
5912 }
5913 break;
5914 case 0xd0:
5915 case 0xd1:
5916 /* shift Ev,1 */
5917 shift = 1;
5918 goto grp2;
5919 case 0xd2:
5920 case 0xd3:
5921 /* shift Ev,cl */
5922 shift = 0;
5923 goto grp2;
5924
5925 case 0x1a4: /* shld imm */
5926 op = 0;
5927 shift = 1;
5928 goto do_shiftd;
5929 case 0x1a5: /* shld cl */
5930 op = 0;
5931 shift = 0;
5932 goto do_shiftd;
5933 case 0x1ac: /* shrd imm */
5934 op = 1;
5935 shift = 1;
5936 goto do_shiftd;
5937 case 0x1ad: /* shrd cl */
5938 op = 1;
5939 shift = 0;
5940 do_shiftd:
5941 ot = dflag + OT_WORD;
5942 modrm = ldub_code(s->pc++);
5943 mod = (modrm >> 6) & 3;
5944 rm = (modrm & 7) | REX_B(s);
5945 reg = ((modrm >> 3) & 7) | rex_r;
5946 if (mod != 3) {
5947 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5948 opreg = OR_TMP0;
5949 } else {
5950 opreg = rm;
5951 }
5952 gen_op_mov_TN_reg(ot, 1, reg);
5953
5954 if (shift) {
5955 val = ldub_code(s->pc++);
5956 tcg_gen_movi_tl(cpu_T3, val);
5957 } else {
5958 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5959 }
5960 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5961 break;
5962
5963 /************************/
5964 /* floats */
5965 case 0xd8 ... 0xdf:
5966 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5967 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5968 /* XXX: what to do if illegal op? */
5969 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5970 break;
5971 }
5972 modrm = ldub_code(s->pc++);
5973 mod = (modrm >> 6) & 3;
5974 rm = modrm & 7;
5975 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
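 /* op packs the low opcode bits (which of d8..df) with the ModRM
    reg field into a single 6-bit FPU operation index */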
5976 if (mod != 3) {
5977 /* memory op */
5978 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5979 switch(op) {
5980 case 0x00 ... 0x07: /* fxxxs */
5981 case 0x10 ... 0x17: /* fixxxl */
5982 case 0x20 ... 0x27: /* fxxxl */
5983 case 0x30 ... 0x37: /* fixxx */
5984 {
5985 int op1;
5986 op1 = op & 7;
5987
5988 switch(op >> 4) {
5989 case 0:
5990 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5991 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5992 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5993 break;
5994 case 1:
5995 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5996 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5997 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5998 break;
5999 case 2:
6000 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6001 (s->mem_index >> 2) - 1);
6002 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6003 break;
6004 case 3:
6005 default:
6006 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6007 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6008 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6009 break;
6010 }
6011
6012 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6013 if (op1 == 3) {
6014 /* fcomp needs pop */
6015 tcg_gen_helper_0_0(helper_fpop);
6016 }
6017 }
6018 break;
6019 case 0x08: /* flds */
6020 case 0x0a: /* fsts */
6021 case 0x0b: /* fstps */
6022 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6023 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6024 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6025 switch(op & 7) {
6026 case 0:
6027 switch(op >> 4) {
6028 case 0:
6029 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6030 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6031 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6032 break;
6033 case 1:
6034 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6035 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6036 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6037 break;
6038 case 2:
6039 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6040 (s->mem_index >> 2) - 1);
6041 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6042 break;
6043 case 3:
6044 default:
6045 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6046 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6047 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6048 break;
6049 }
6050 break;
6051 case 1:
6052 /* XXX: the corresponding CPUID bit (SSE3, for fisttp) must be tested! */
6053 switch(op >> 4) {
6054 case 1:
6055 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6056 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6057 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6058 break;
6059 case 2:
6060 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6061 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6062 (s->mem_index >> 2) - 1);
6063 break;
6064 case 3:
6065 default:
6066 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6067 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6068 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6069 break;
6070 }
6071 tcg_gen_helper_0_0(helper_fpop);
6072 break;
6073 default:
6074 switch(op >> 4) {
6075 case 0:
6076 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6077 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6078 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6079 break;
6080 case 1:
6081 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6082 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6083 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6084 break;
6085 case 2:
6086 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6087 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6088 (s->mem_index >> 2) - 1);
6089 break;
6090 case 3:
6091 default:
6092 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6093 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6094 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6095 break;
6096 }
6097 if ((op & 7) == 3)
6098 tcg_gen_helper_0_0(helper_fpop);
6099 break;
6100 }
6101 break;
6102 case 0x0c: /* fldenv mem */
6103 if (s->cc_op != CC_OP_DYNAMIC)
6104 gen_op_set_cc_op(s->cc_op);
6105 gen_jmp_im(pc_start - s->cs_base);
6106 tcg_gen_helper_0_2(helper_fldenv,
6107 cpu_A0, tcg_const_i32(s->dflag));
6108 break;
6109 case 0x0d: /* fldcw mem */
6110 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6111 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6112 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6113 break;
6114 case 0x0e: /* fnstenv mem */
6115 if (s->cc_op != CC_OP_DYNAMIC)
6116 gen_op_set_cc_op(s->cc_op);
6117 gen_jmp_im(pc_start - s->cs_base);
6118 tcg_gen_helper_0_2(helper_fstenv,
6119 cpu_A0, tcg_const_i32(s->dflag));
6120 break;
6121 case 0x0f: /* fnstcw mem */
6122 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6123 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6124 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6125 break;
6126 case 0x1d: /* fldt mem */
6127 if (s->cc_op != CC_OP_DYNAMIC)
6128 gen_op_set_cc_op(s->cc_op);
6129 gen_jmp_im(pc_start - s->cs_base);
6130 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6131 break;
6132 case 0x1f: /* fstpt mem */
6133 if (s->cc_op != CC_OP_DYNAMIC)
6134 gen_op_set_cc_op(s->cc_op);
6135 gen_jmp_im(pc_start - s->cs_base);
6136 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6137 tcg_gen_helper_0_0(helper_fpop);
6138 break;
6139 case 0x2c: /* frstor mem */
6140 if (s->cc_op != CC_OP_DYNAMIC)
6141 gen_op_set_cc_op(s->cc_op);
6142 gen_jmp_im(pc_start - s->cs_base);
6143 tcg_gen_helper_0_2(helper_frstor,
6144 cpu_A0, tcg_const_i32(s->dflag));
6145 break;
6146 case 0x2e: /* fnsave mem */
6147 if (s->cc_op != CC_OP_DYNAMIC)
6148 gen_op_set_cc_op(s->cc_op);
6149 gen_jmp_im(pc_start - s->cs_base);
6150 tcg_gen_helper_0_2(helper_fsave,
6151 cpu_A0, tcg_const_i32(s->dflag));
6152 break;
6153 case 0x2f: /* fnstsw mem */
6154 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6155 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6156 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6157 break;
6158 case 0x3c: /* fbld */
6159 if (s->cc_op != CC_OP_DYNAMIC)
6160 gen_op_set_cc_op(s->cc_op);
6161 gen_jmp_im(pc_start - s->cs_base);
6162 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6163 break;
6164 case 0x3e: /* fbstp */
6165 if (s->cc_op != CC_OP_DYNAMIC)
6166 gen_op_set_cc_op(s->cc_op);
6167 gen_jmp_im(pc_start - s->cs_base);
6168 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6169 tcg_gen_helper_0_0(helper_fpop);
6170 break;
6171 case 0x3d: /* fildll */
6172 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6173 (s->mem_index >> 2) - 1);
6174 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6175 break;
6176 case 0x3f: /* fistpll */
6177 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6178 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6179 (s->mem_index >> 2) - 1);
6180 tcg_gen_helper_0_0(helper_fpop);
6181 break;
6182 default:
6183 goto illegal_op;
6184 }
6185 } else {
6186 /* register float ops */
6187 opreg = rm;
6188
6189 switch(op) {
6190 case 0x08: /* fld sti */
6191 tcg_gen_helper_0_0(helper_fpush);
6192 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6193 break;
6194 case 0x09: /* fxchg sti */
6195 case 0x29: /* fxchg4 sti, undocumented op */
6196 case 0x39: /* fxchg7 sti, undocumented op */
6197 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6198 break;
6199 case 0x0a: /* grp d9/2 */
6200 switch(rm) {
6201 case 0: /* fnop */
6202 /* check exceptions (FreeBSD FPU probe) */
6203 if (s->cc_op != CC_OP_DYNAMIC)
6204 gen_op_set_cc_op(s->cc_op);
6205 gen_jmp_im(pc_start - s->cs_base);
6206 tcg_gen_helper_0_0(helper_fwait);
6207 break;
6208 default:
6209 goto illegal_op;
6210 }
6211 break;
6212 case 0x0c: /* grp d9/4 */
6213 switch(rm) {
6214 case 0: /* fchs */
6215 tcg_gen_helper_0_0(helper_fchs_ST0);
6216 break;
6217 case 1: /* fabs */
6218 tcg_gen_helper_0_0(helper_fabs_ST0);
6219 break;
6220 case 4: /* ftst */
6221 tcg_gen_helper_0_0(helper_fldz_FT0);
6222 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6223 break;
6224 case 5: /* fxam */
6225 tcg_gen_helper_0_0(helper_fxam_ST0);
6226 break;
6227 default:
6228 goto illegal_op;
6229 }
6230 break;
6231 case 0x0d: /* grp d9/5 */
6232 {
6233 switch(rm) {
6234 case 0:
6235 tcg_gen_helper_0_0(helper_fpush);
6236 tcg_gen_helper_0_0(helper_fld1_ST0);
6237 break;
6238 case 1:
6239 tcg_gen_helper_0_0(helper_fpush);
6240 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6241 break;
6242 case 2:
6243 tcg_gen_helper_0_0(helper_fpush);
6244 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6245 break;
6246 case 3:
6247 tcg_gen_helper_0_0(helper_fpush);
6248 tcg_gen_helper_0_0(helper_fldpi_ST0);
6249 break;
6250 case 4:
6251 tcg_gen_helper_0_0(helper_fpush);
6252 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6253 break;
6254 case 5:
6255 tcg_gen_helper_0_0(helper_fpush);
6256 tcg_gen_helper_0_0(helper_fldln2_ST0);
6257 break;
6258 case 6:
6259 tcg_gen_helper_0_0(helper_fpush);
6260 tcg_gen_helper_0_0(helper_fldz_ST0);
6261 break;
6262 default:
6263 goto illegal_op;
6264 }
6265 }
6266 break;
6267 case 0x0e: /* grp d9/6 */
6268 switch(rm) {
6269 case 0: /* f2xm1 */
6270 tcg_gen_helper_0_0(helper_f2xm1);
6271 break;
6272 case 1: /* fyl2x */
6273 tcg_gen_helper_0_0(helper_fyl2x);
6274 break;
6275 case 2: /* fptan */
6276 tcg_gen_helper_0_0(helper_fptan);
6277 break;
6278 case 3: /* fpatan */
6279 tcg_gen_helper_0_0(helper_fpatan);
6280 break;
6281 case 4: /* fxtract */
6282 tcg_gen_helper_0_0(helper_fxtract);
6283 break;
6284 case 5: /* fprem1 */
6285 tcg_gen_helper_0_0(helper_fprem1);
6286 break;
6287 case 6: /* fdecstp */
6288 tcg_gen_helper_0_0(helper_fdecstp);
6289 break;
6290 default:
6291 case 7: /* fincstp */
6292 tcg_gen_helper_0_0(helper_fincstp);
6293 break;
6294 }
6295 break;
6296 case 0x0f: /* grp d9/7 */
6297 switch(rm) {
6298 case 0: /* fprem */
6299 tcg_gen_helper_0_0(helper_fprem);
6300 break;
6301 case 1: /* fyl2xp1 */
6302 tcg_gen_helper_0_0(helper_fyl2xp1);
6303 break;
6304 case 2: /* fsqrt */
6305 tcg_gen_helper_0_0(helper_fsqrt);
6306 break;
6307 case 3: /* fsincos */
6308 tcg_gen_helper_0_0(helper_fsincos);
6309 break;
6310 case 5: /* fscale */
6311 tcg_gen_helper_0_0(helper_fscale);
6312 break;
6313 case 4: /* frndint */
6314 tcg_gen_helper_0_0(helper_frndint);
6315 break;
6316 case 6: /* fsin */
6317 tcg_gen_helper_0_0(helper_fsin);
6318 break;
6319 default:
6320 case 7: /* fcos */
6321 tcg_gen_helper_0_0(helper_fcos);
6322 break;
6323 }
6324 break;
6325 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6326 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6327 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6328 {
6329 int op1;
6330
6331 op1 = op & 7;
6332 if (op >= 0x20) {
6333 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6334 if (op >= 0x30)
6335 tcg_gen_helper_0_0(helper_fpop);
6336 } else {
6337 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6338 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6339 }
6340 }
6341 break;
6342 case 0x02: /* fcom */
6343 case 0x22: /* fcom2, undocumented op */
6344 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6345 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6346 break;
6347 case 0x03: /* fcomp */
6348 case 0x23: /* fcomp3, undocumented op */
6349 case 0x32: /* fcomp5, undocumented op */
6350 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6351 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6352 tcg_gen_helper_0_0(helper_fpop);
6353 break;
6354 case 0x15: /* da/5 */
6355 switch(rm) {
6356 case 1: /* fucompp */
6357 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6358 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6359 tcg_gen_helper_0_0(helper_fpop);
6360 tcg_gen_helper_0_0(helper_fpop);
6361 break;
6362 default:
6363 goto illegal_op;
6364 }
6365 break;
6366 case 0x1c:
6367 switch(rm) {
6368 case 0: /* feni (287 only, just do nop here) */
6369 break;
6370 case 1: /* fdisi (287 only, just do nop here) */
6371 break;
6372 case 2: /* fclex */
6373 tcg_gen_helper_0_0(helper_fclex);
6374 break;
6375 case 3: /* fninit */
6376 tcg_gen_helper_0_0(helper_fninit);
6377 break;
6378 case 4: /* fsetpm (287 only, just do nop here) */
6379 break;
6380 default:
6381 goto illegal_op;
6382 }
6383 break;
6384 case 0x1d: /* fucomi */
6385 if (s->cc_op != CC_OP_DYNAMIC)
6386 gen_op_set_cc_op(s->cc_op);
6387 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6388 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6389 s->cc_op = CC_OP_EFLAGS;
6390 break;
6391 case 0x1e: /* fcomi */
6392 if (s->cc_op != CC_OP_DYNAMIC)
6393 gen_op_set_cc_op(s->cc_op);
6394 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6395 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6396 s->cc_op = CC_OP_EFLAGS;
6397 break;
6398 case 0x28: /* ffree sti */
6399 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6400 break;
6401 case 0x2a: /* fst sti */
6402 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6403 break;
6404 case 0x2b: /* fstp sti */
6405 case 0x0b: /* fstp1 sti, undocumented op */
6406 case 0x3a: /* fstp8 sti, undocumented op */
6407 case 0x3b: /* fstp9 sti, undocumented op */
6408 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6409 tcg_gen_helper_0_0(helper_fpop);
6410 break;
6411 case 0x2c: /* fucom st(i) */
6412 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6413 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6414 break;
6415 case 0x2d: /* fucomp st(i) */
6416 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6417 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6418 tcg_gen_helper_0_0(helper_fpop);
6419 break;
6420 case 0x33: /* de/3 */
6421 switch(rm) {
6422 case 1: /* fcompp */
6423 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6424 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6425 tcg_gen_helper_0_0(helper_fpop);
6426 tcg_gen_helper_0_0(helper_fpop);
6427 break;
6428 default:
6429 goto illegal_op;
6430 }
6431 break;
6432 case 0x38: /* ffreep sti, undocumented op */
6433 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6434 tcg_gen_helper_0_0(helper_fpop);
6435 break;
6436 case 0x3c: /* df/4 */
6437 switch(rm) {
6438 case 0:
6439 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6440 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6441 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6442 break;
6443 default:
6444 goto illegal_op;
6445 }
6446 break;
6447 case 0x3d: /* fucomip */
6448 if (s->cc_op != CC_OP_DYNAMIC)
6449 gen_op_set_cc_op(s->cc_op);
6450 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6451 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6452 tcg_gen_helper_0_0(helper_fpop);
6453 s->cc_op = CC_OP_EFLAGS;
6454 break;
6455 case 0x3e: /* fcomip */
6456 if (s->cc_op != CC_OP_DYNAMIC)
6457 gen_op_set_cc_op(s->cc_op);
6458 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6459 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6460 tcg_gen_helper_0_0(helper_fpop);
6461 s->cc_op = CC_OP_EFLAGS;
6462 break;
6463 case 0x10 ... 0x13: /* fcmovxx */
6464 case 0x18 ... 0x1b:
6465 {
6466 int op1, l1;
6467 static const uint8_t fcmov_cc[8] = {
6468 (JCC_B << 1),
6469 (JCC_Z << 1),
6470 (JCC_BE << 1),
6471 (JCC_P << 1),
6472 };
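     /* The table maps the low two opcode bits to a jcc code (shifted
        left so bit 0 can carry the negation); bit 3 of op separates
        fcmovcc (0x10..0x13) from fcmovncc (0x18..0x1b). E.g. fcmovb
        (DA C0+i) has op == 0x10, giving op1 = (JCC_B << 1) | 1, so the
        branch below skips the register move whenever CF is clear. */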
6473 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6474 l1 = gen_new_label();
6475 gen_jcc1(s, s->cc_op, op1, l1);
6476 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6477 gen_set_label(l1);
6478 }
6479 break;
6480 default:
6481 goto illegal_op;
6482 }
6483 }
6484 break;
6485 /************************/
6486 /* string ops */
6487
6488 case 0xa4: /* movsS */
6489 case 0xa5:
6490 if ((b & 1) == 0)
6491 ot = OT_BYTE;
6492 else
6493 ot = dflag + OT_WORD;
6494
6495 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6496 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6497 } else {
6498 gen_movs(s, ot);
6499 }
6500 break;
6501
6502 case 0xaa: /* stosS */
6503 case 0xab:
6504 if ((b & 1) == 0)
6505 ot = OT_BYTE;
6506 else
6507 ot = dflag + OT_WORD;
6508
6509 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6510 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6511 } else {
6512 gen_stos(s, ot);
6513 }
6514 break;
6515 case 0xac: /* lodsS */
6516 case 0xad:
6517 if ((b & 1) == 0)
6518 ot = OT_BYTE;
6519 else
6520 ot = dflag + OT_WORD;
6521 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6522 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6523 } else {
6524 gen_lods(s, ot);
6525 }
6526 break;
6527 case 0xae: /* scasS */
6528 case 0xaf:
6529 if ((b & 1) == 0)
6530 ot = OT_BYTE;
6531 else
6532 ot = dflag + OT_WORD;
6533 if (prefixes & PREFIX_REPNZ) {
6534 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6535 } else if (prefixes & PREFIX_REPZ) {
6536 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6537 } else {
6538 gen_scas(s, ot);
6539 s->cc_op = CC_OP_SUBB + ot;
6540 }
6541 break;
6542
6543 case 0xa6: /* cmpsS */
6544 case 0xa7:
6545 if ((b & 1) == 0)
6546 ot = OT_BYTE;
6547 else
6548 ot = dflag + OT_WORD;
6549 if (prefixes & PREFIX_REPNZ) {
6550 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6551 } else if (prefixes & PREFIX_REPZ) {
6552 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6553 } else {
6554 gen_cmps(s, ot);
6555 s->cc_op = CC_OP_SUBB + ot;
6556 }
6557 break;
6558 case 0x6c: /* insS */
6559 case 0x6d:
6560 if ((b & 1) == 0)
6561 ot = OT_BYTE;
6562 else
6563 ot = dflag ? OT_LONG : OT_WORD;
6564 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6565 gen_op_andl_T0_ffff();
6566 gen_check_io(s, ot, pc_start - s->cs_base,
6567 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6568 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6569 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6570 } else {
6571 gen_ins(s, ot);
6572 if (use_icount) {
6573 gen_jmp(s, s->pc - s->cs_base);
6574 }
6575 }
6576 break;
6577 case 0x6e: /* outsS */
6578 case 0x6f:
6579 if ((b & 1) == 0)
6580 ot = OT_BYTE;
6581 else
6582 ot = dflag ? OT_LONG : OT_WORD;
6583 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6584 gen_op_andl_T0_ffff();
6585 gen_check_io(s, ot, pc_start - s->cs_base,
6586 svm_is_rep(prefixes) | 4);
6587 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6588 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6589 } else {
6590 gen_outs(s, ot);
6591 if (use_icount) {
6592 gen_jmp(s, s->pc - s->cs_base);
6593 }
6594 }
6595 break;
6596
6597 /************************/
6598 /* port I/O */
6599
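/* Note: with icount (deterministic instruction counting) enabled, an
   instruction that touches I/O must be the last one executed in its
   TB, so the in/out cases below bracket the helper call with
   gen_io_start()/gen_io_end() and then leave the block via gen_jmp(). */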
6600 case 0xe4:
6601 case 0xe5:
6602 if ((b & 1) == 0)
6603 ot = OT_BYTE;
6604 else
6605 ot = dflag ? OT_LONG : OT_WORD;
6606 val = ldub_code(s->pc++);
6607 gen_op_movl_T0_im(val);
6608 gen_check_io(s, ot, pc_start - s->cs_base,
6609 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6610 if (use_icount)
6611 gen_io_start();
6612 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6613 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6614 gen_op_mov_reg_T1(ot, R_EAX);
6615 if (use_icount) {
6616 gen_io_end();
6617 gen_jmp(s, s->pc - s->cs_base);
6618 }
6619 break;
6620 case 0xe6:
6621 case 0xe7:
6622 if ((b & 1) == 0)
6623 ot = OT_BYTE;
6624 else
6625 ot = dflag ? OT_LONG : OT_WORD;
6626 val = ldub_code(s->pc++);
6627 gen_op_movl_T0_im(val);
6628 gen_check_io(s, ot, pc_start - s->cs_base,
6629 svm_is_rep(prefixes));
6630#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6631 if (val == 0x80)
6632 break;
6633#endif /* VBOX */
6634 gen_op_mov_TN_reg(ot, 1, R_EAX);
6635
6636 if (use_icount)
6637 gen_io_start();
6638 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6639 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6640 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6641 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6642 if (use_icount) {
6643 gen_io_end();
6644 gen_jmp(s, s->pc - s->cs_base);
6645 }
6646 break;
6647 case 0xec:
6648 case 0xed:
6649 if ((b & 1) == 0)
6650 ot = OT_BYTE;
6651 else
6652 ot = dflag ? OT_LONG : OT_WORD;
6653 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6654 gen_op_andl_T0_ffff();
6655 gen_check_io(s, ot, pc_start - s->cs_base,
6656 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6657 if (use_icount)
6658 gen_io_start();
6659 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6660 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6661 gen_op_mov_reg_T1(ot, R_EAX);
6662 if (use_icount) {
6663 gen_io_end();
6664 gen_jmp(s, s->pc - s->cs_base);
6665 }
6666 break;
6667 case 0xee:
6668 case 0xef:
6669 if ((b & 1) == 0)
6670 ot = OT_BYTE;
6671 else
6672 ot = dflag ? OT_LONG : OT_WORD;
6673 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6674 gen_op_andl_T0_ffff();
6675 gen_check_io(s, ot, pc_start - s->cs_base,
6676 svm_is_rep(prefixes));
6677 gen_op_mov_TN_reg(ot, 1, R_EAX);
6678
6679 if (use_icount)
6680 gen_io_start();
6681 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6682 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6683 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6684 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6685 if (use_icount) {
6686 gen_io_end();
6687 gen_jmp(s, s->pc - s->cs_base);
6688 }
6689 break;
6690
6691 /************************/
6692 /* control */
6693 case 0xc2: /* ret im */
6694 val = ldsw_code(s->pc);
6695 s->pc += 2;
6696 gen_pop_T0(s);
6697 if (CODE64(s) && s->dflag)
6698 s->dflag = 2;
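     /* In 64-bit mode a near ret pops a 64-bit return address unless a
        16-bit operand-size override is used, hence dflag is forced to 2
        above; 2 << dflag is then the slot size in bytes (2/4/8). */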
6699 gen_stack_update(s, val + (2 << s->dflag));
6700 if (s->dflag == 0)
6701 gen_op_andl_T0_ffff();
6702 gen_op_jmp_T0();
6703 gen_eob(s);
6704 break;
6705 case 0xc3: /* ret */
6706 gen_pop_T0(s);
6707 gen_pop_update(s);
6708 if (s->dflag == 0)
6709 gen_op_andl_T0_ffff();
6710 gen_op_jmp_T0();
6711 gen_eob(s);
6712 break;
6713 case 0xca: /* lret im */
6714 val = ldsw_code(s->pc);
6715 s->pc += 2;
6716 do_lret:
6717 if (s->pe && !s->vm86) {
6718 if (s->cc_op != CC_OP_DYNAMIC)
6719 gen_op_set_cc_op(s->cc_op);
6720 gen_jmp_im(pc_start - s->cs_base);
6721 tcg_gen_helper_0_2(helper_lret_protected,
6722 tcg_const_i32(s->dflag),
6723 tcg_const_i32(val));
6724 } else {
6725 gen_stack_A0(s);
6726 /* pop offset */
6727 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6728 if (s->dflag == 0)
6729 gen_op_andl_T0_ffff();
6730 /* NOTE: keeping EIP updated is not a problem in case of
6731 exception */
6732 gen_op_jmp_T0();
6733 /* pop selector */
6734 gen_op_addl_A0_im(2 << s->dflag);
6735 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6736 gen_op_movl_seg_T0_vm(R_CS);
6737 /* add stack offset */
6738 gen_stack_update(s, val + (4 << s->dflag));
6739 }
6740 gen_eob(s);
6741 break;
6742 case 0xcb: /* lret */
6743 val = 0;
6744 goto do_lret;
6745 case 0xcf: /* iret */
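     /* iret: real mode and vm86 with IOPL == 3 go through
        helper_iret_real; other vm86 cases raise #GP (the VBOX build
        additionally permits the VME path for 16-bit operands);
        protected mode uses helper_iret_protected, which also covers
        nested-task returns. */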
6746 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6747 if (!s->pe) {
6748 /* real mode */
6749 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6750 s->cc_op = CC_OP_EFLAGS;
6751 } else if (s->vm86) {
6752#ifdef VBOX
6753 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6754#else
6755 if (s->iopl != 3) {
6756#endif
6757 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6758 } else {
6759 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6760 s->cc_op = CC_OP_EFLAGS;
6761 }
6762 } else {
6763 if (s->cc_op != CC_OP_DYNAMIC)
6764 gen_op_set_cc_op(s->cc_op);
6765 gen_jmp_im(pc_start - s->cs_base);
6766 tcg_gen_helper_0_2(helper_iret_protected,
6767 tcg_const_i32(s->dflag),
6768 tcg_const_i32(s->pc - s->cs_base));
6769 s->cc_op = CC_OP_EFLAGS;
6770 }
6771 gen_eob(s);
6772 break;
6773 case 0xe8: /* call im */
6774 {
6775 if (dflag)
6776 tval = (int32_t)insn_get(s, OT_LONG);
6777 else
6778 tval = (int16_t)insn_get(s, OT_WORD);
6779 next_eip = s->pc - s->cs_base;
6780 tval += next_eip;
6781 if (s->dflag == 0)
6782 tval &= 0xffff;
6783 gen_movtl_T0_im(next_eip);
6784 gen_push_T0(s);
6785 gen_jmp(s, tval);
6786 }
6787 break;
6788 case 0x9a: /* lcall im */
6789 {
6790 unsigned int selector, offset;
6791
6792 if (CODE64(s))
6793 goto illegal_op;
6794 ot = dflag ? OT_LONG : OT_WORD;
6795 offset = insn_get(s, ot);
6796 selector = insn_get(s, OT_WORD);
6797
6798 gen_op_movl_T0_im(selector);
6799 gen_op_movl_T1_imu(offset);
6800 }
6801 goto do_lcall;
6802 case 0xe9: /* jmp im */
6803 if (dflag)
6804 tval = (int32_t)insn_get(s, OT_LONG);
6805 else
6806 tval = (int16_t)insn_get(s, OT_WORD);
6807 tval += s->pc - s->cs_base;
6808 if (s->dflag == 0)
6809 tval &= 0xffff;
6810 gen_jmp(s, tval);
6811 break;
6812 case 0xea: /* ljmp im */
6813 {
6814 unsigned int selector, offset;
6815
6816 if (CODE64(s))
6817 goto illegal_op;
6818 ot = dflag ? OT_LONG : OT_WORD;
6819 offset = insn_get(s, ot);
6820 selector = insn_get(s, OT_WORD);
6821
6822 gen_op_movl_T0_im(selector);
6823 gen_op_movl_T1_imu(offset);
6824 }
6825 goto do_ljmp;
6826 case 0xeb: /* jmp Jb */
6827 tval = (int8_t)insn_get(s, OT_BYTE);
6828 tval += s->pc - s->cs_base;
6829 if (s->dflag == 0)
6830 tval &= 0xffff;
6831 gen_jmp(s, tval);
6832 break;
6833 case 0x70 ... 0x7f: /* jcc Jb */
6834 tval = (int8_t)insn_get(s, OT_BYTE);
6835 goto do_jcc;
6836 case 0x180 ... 0x18f: /* jcc Jv */
6837 if (dflag) {
6838 tval = (int32_t)insn_get(s, OT_LONG);
6839 } else {
6840 tval = (int16_t)insn_get(s, OT_WORD);
6841 }
6842 do_jcc:
6843 next_eip = s->pc - s->cs_base;
6844 tval += next_eip;
6845 if (s->dflag == 0)
6846 tval &= 0xffff;
6847 gen_jcc(s, b, tval, next_eip);
6848 break;
6849
6850 case 0x190 ... 0x19f: /* setcc Gv */
6851 modrm = ldub_code(s->pc++);
6852 gen_setcc(s, b);
6853 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6854 break;
6855 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6856 {
6857 int l1;
6858 TCGv t0;
6859
6860 ot = dflag + OT_WORD;
6861 modrm = ldub_code(s->pc++);
6862 reg = ((modrm >> 3) & 7) | rex_r;
6863 mod = (modrm >> 6) & 3;
6864 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6865 if (mod != 3) {
6866 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6867 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6868 } else {
6869 rm = (modrm & 7) | REX_B(s);
6870 gen_op_mov_v_reg(ot, t0, rm);
6871 }
6872#ifdef TARGET_X86_64
6873 if (ot == OT_LONG) {
6874 /* XXX: specific Intel behaviour ? */
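     /* A 32-bit cmov in long mode always zero-extends the destination:
        the low half is stored only when the condition holds, but the
        high half is cleared unconditionally below. */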
6875 l1 = gen_new_label();
6876 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6877 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6878 gen_set_label(l1);
6879 tcg_gen_movi_tl(cpu_tmp0, 0);
6880 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6881 } else
6882#endif
6883 {
6884 l1 = gen_new_label();
6885 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6886 gen_op_mov_reg_v(ot, reg, t0);
6887 gen_set_label(l1);
6888 }
6889 tcg_temp_free(t0);
6890 }
6891 break;
6892
6893 /************************/
6894 /* flags */
6895 case 0x9c: /* pushf */
6896 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6897#ifdef VBOX
6898 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6899#else
6900 if (s->vm86 && s->iopl != 3) {
6901#endif
6902 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6903 } else {
6904 if (s->cc_op != CC_OP_DYNAMIC)
6905 gen_op_set_cc_op(s->cc_op);
6906#ifdef VBOX
6907 if (s->vm86 && s->vme && s->iopl != 3)
6908 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6909 else
6910#endif
6911 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6912 gen_push_T0(s);
6913 }
6914 break;
6915 case 0x9d: /* popf */
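     /* The writable EFLAGS bits depend on privilege: ring 0 may also
        change IOPL and IF; CPL <= IOPL may change IF but not IOPL; any
        other ring may change neither. TF/AC/ID/NT are writable in every
        case, and the 16-bit forms mask the value to the low word. */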
6916 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6917#ifdef VBOX
6918 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6919#else
6920 if (s->vm86 && s->iopl != 3) {
6921#endif
6922 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6923 } else {
6924 gen_pop_T0(s);
6925 if (s->cpl == 0) {
6926 if (s->dflag) {
6927 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6928 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6929 } else {
6930 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6931 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6932 }
6933 } else {
6934 if (s->cpl <= s->iopl) {
6935 if (s->dflag) {
6936 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6937 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6938 } else {
6939 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6940 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6941 }
6942 } else {
6943 if (s->dflag) {
6944 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6945 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6946 } else {
6947#ifdef VBOX
6948 if (s->vm86 && s->vme)
6949 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
6950 else
6951#endif
6952 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6953 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6954 }
6955 }
6956 }
6957 gen_pop_update(s);
6958 s->cc_op = CC_OP_EFLAGS;
6959 /* abort translation because TF flag may change */
6960 gen_jmp_im(s->pc - s->cs_base);
6961 gen_eob(s);
6962 }
6963 break;
6964 case 0x9e: /* sahf */
6965 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6966 goto illegal_op;
6967 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6968 if (s->cc_op != CC_OP_DYNAMIC)
6969 gen_op_set_cc_op(s->cc_op);
6970 gen_compute_eflags(cpu_cc_src);
6971 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6972 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6973 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6974 s->cc_op = CC_OP_EFLAGS;
6975 break;
6976 case 0x9f: /* lahf */
6977 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6978 goto illegal_op;
6979 if (s->cc_op != CC_OP_DYNAMIC)
6980 gen_op_set_cc_op(s->cc_op);
6981 gen_compute_eflags(cpu_T[0]);
6982 /* Note: gen_compute_eflags() only gives the condition codes */
6983 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6984 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6985 break;
6986 case 0xf5: /* cmc */
6987 if (s->cc_op != CC_OP_DYNAMIC)
6988 gen_op_set_cc_op(s->cc_op);
6989 gen_compute_eflags(cpu_cc_src);
6990 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6991 s->cc_op = CC_OP_EFLAGS;
6992 break;
6993 case 0xf8: /* clc */
6994 if (s->cc_op != CC_OP_DYNAMIC)
6995 gen_op_set_cc_op(s->cc_op);
6996 gen_compute_eflags(cpu_cc_src);
6997 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6998 s->cc_op = CC_OP_EFLAGS;
6999 break;
7000 case 0xf9: /* stc */
7001 if (s->cc_op != CC_OP_DYNAMIC)
7002 gen_op_set_cc_op(s->cc_op);
7003 gen_compute_eflags(cpu_cc_src);
7004 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7005 s->cc_op = CC_OP_EFLAGS;
7006 break;
7007 case 0xfc: /* cld */
7008 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7009 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7010 break;
7011 case 0xfd: /* std */
7012 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7013 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7014 break;
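     /* df is kept as +1/-1 rather than as the raw EFLAGS bit so the
        string ops can advance ESI/EDI by shifting it left by the
        operand size (cf. gen_op_movl_T0_Dshift earlier in this file). */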
7015
7016 /************************/
7017 /* bit operations */
7018 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7019 ot = dflag + OT_WORD;
7020 modrm = ldub_code(s->pc++);
7021 op = (modrm >> 3) & 7;
7022 mod = (modrm >> 6) & 3;
7023 rm = (modrm & 7) | REX_B(s);
7024 if (mod != 3) {
7025 s->rip_offset = 1;
7026 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7027 gen_op_ld_T0_A0(ot + s->mem_index);
7028 } else {
7029 gen_op_mov_TN_reg(ot, 0, rm);
7030 }
7031 /* load shift */
7032 val = ldub_code(s->pc++);
7033 gen_op_movl_T1_im(val);
7034 if (op < 4)
7035 goto illegal_op;
7036 op -= 4;
7037 goto bt_op;
7038 case 0x1a3: /* bt Gv, Ev */
7039 op = 0;
7040 goto do_btx;
7041 case 0x1ab: /* bts */
7042 op = 1;
7043 goto do_btx;
7044 case 0x1b3: /* btr */
7045 op = 2;
7046 goto do_btx;
7047 case 0x1bb: /* btc */
7048 op = 3;
7049 do_btx:
7050 ot = dflag + OT_WORD;
7051 modrm = ldub_code(s->pc++);
7052 reg = ((modrm >> 3) & 7) | rex_r;
7053 mod = (modrm >> 6) & 3;
7054 rm = (modrm & 7) | REX_B(s);
7055 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7056 if (mod != 3) {
7057 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7058 /* specific case: we need to add a displacement */
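     /* The bit offset taken from a register is not truncated for the
        memory forms: the effective address is first advanced by
        (bitofs >> log2(width)) * width_bytes. E.g. "bts %ecx,(%eax)"
        with ECX = 35 and ot == OT_LONG adds (35 >> 5) << 2 = 4 to A0
        and then tests bit 35 & 31 = 3 of the loaded dword. */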
7059 gen_exts(ot, cpu_T[1]);
7060 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7061 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7062 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7063 gen_op_ld_T0_A0(ot + s->mem_index);
7064 } else {
7065 gen_op_mov_TN_reg(ot, 0, rm);
7066 }
7067 bt_op:
7068 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7069 switch(op) {
7070 case 0:
7071 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7072 tcg_gen_movi_tl(cpu_cc_dst, 0);
7073 break;
7074 case 1:
7075 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7076 tcg_gen_movi_tl(cpu_tmp0, 1);
7077 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7078 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7079 break;
7080 case 2:
7081 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7082 tcg_gen_movi_tl(cpu_tmp0, 1);
7083 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7084 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7085 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7086 break;
7087 default:
7088 case 3:
7089 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7090 tcg_gen_movi_tl(cpu_tmp0, 1);
7091 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7092 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7093 break;
7094 }
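     /* CC_OP_SARB + ot derives CF from bit 0 of cc_src, so the shifted
        copy saved in cc_src leaves the tested bit in CF as bt/bts/btr/
        btc require (the remaining arithmetic flags are architecturally
        undefined for these insns). */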
7095 s->cc_op = CC_OP_SARB + ot;
7096 if (op != 0) {
7097 if (mod != 3)
7098 gen_op_st_T0_A0(ot + s->mem_index);
7099 else
7100 gen_op_mov_reg_T0(ot, rm);
7101 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7102 tcg_gen_movi_tl(cpu_cc_dst, 0);
7103 }
7104 break;
7105 case 0x1bc: /* bsf */
7106 case 0x1bd: /* bsr */
7107 {
7108 int label1;
7109 TCGv t0;
7110
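     /* bsf/bsr: a zero source must set ZF and leave the destination
        untouched. cc_dst acts as the pseudo result for CC_OP_LOGICB +
        ot: it stays 0 (ZF set) when the branch below is taken and is
        forced to 1 (ZF clear) after a successful scan. */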
7111 ot = dflag + OT_WORD;
7112 modrm = ldub_code(s->pc++);
7113 reg = ((modrm >> 3) & 7) | rex_r;
7114 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7115 gen_extu(ot, cpu_T[0]);
7116 label1 = gen_new_label();
7117 tcg_gen_movi_tl(cpu_cc_dst, 0);
7118 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7119 tcg_gen_mov_tl(t0, cpu_T[0]);
7120 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7121 if (b & 1) {
7122 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7123 } else {
7124 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7125 }
7126 gen_op_mov_reg_T0(ot, reg);
7127 tcg_gen_movi_tl(cpu_cc_dst, 1);
7128 gen_set_label(label1);
7129 tcg_gen_discard_tl(cpu_cc_src);
7130 s->cc_op = CC_OP_LOGICB + ot;
7131 tcg_temp_free(t0);
7132 }
7133 break;
7134 /************************/
7135 /* bcd */
7136 case 0x27: /* daa */
7137 if (CODE64(s))
7138 goto illegal_op;
7139 if (s->cc_op != CC_OP_DYNAMIC)
7140 gen_op_set_cc_op(s->cc_op);
7141 tcg_gen_helper_0_0(helper_daa);
7142 s->cc_op = CC_OP_EFLAGS;
7143 break;
7144 case 0x2f: /* das */
7145 if (CODE64(s))
7146 goto illegal_op;
7147 if (s->cc_op != CC_OP_DYNAMIC)
7148 gen_op_set_cc_op(s->cc_op);
7149 tcg_gen_helper_0_0(helper_das);
7150 s->cc_op = CC_OP_EFLAGS;
7151 break;
7152 case 0x37: /* aaa */
7153 if (CODE64(s))
7154 goto illegal_op;
7155 if (s->cc_op != CC_OP_DYNAMIC)
7156 gen_op_set_cc_op(s->cc_op);
7157 tcg_gen_helper_0_0(helper_aaa);
7158 s->cc_op = CC_OP_EFLAGS;
7159 break;
7160 case 0x3f: /* aas */
7161 if (CODE64(s))
7162 goto illegal_op;
7163 if (s->cc_op != CC_OP_DYNAMIC)
7164 gen_op_set_cc_op(s->cc_op);
7165 tcg_gen_helper_0_0(helper_aas);
7166 s->cc_op = CC_OP_EFLAGS;
7167 break;
7168 case 0xd4: /* aam */
7169 if (CODE64(s))
7170 goto illegal_op;
7171 val = ldub_code(s->pc++);
7172 if (val == 0) {
7173 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7174 } else {
7175 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7176 s->cc_op = CC_OP_LOGICB;
7177 }
7178 break;
7179 case 0xd5: /* aad */
7180 if (CODE64(s))
7181 goto illegal_op;
7182 val = ldub_code(s->pc++);
7183 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7184 s->cc_op = CC_OP_LOGICB;
7185 break;
7186 /************************/
7187 /* misc */
7188 case 0x90: /* nop */
7189 /* XXX: xchg + rex handling */
7190 /* XXX: correct lock test for all insn */
7191 if (prefixes & PREFIX_LOCK)
7192 goto illegal_op;
7193 if (prefixes & PREFIX_REPZ) {
7194 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7195 }
7196 break;
7197 case 0x9b: /* fwait */
7198 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7199 (HF_MP_MASK | HF_TS_MASK)) {
7200 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7201 } else {
7202 if (s->cc_op != CC_OP_DYNAMIC)
7203 gen_op_set_cc_op(s->cc_op);
7204 gen_jmp_im(pc_start - s->cs_base);
7205 tcg_gen_helper_0_0(helper_fwait);
7206 }
7207 break;
7208 case 0xcc: /* int3 */
7209#ifdef VBOX
7210 if (s->vm86 && s->iopl != 3 && !s->vme) {
7211 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7212 } else
7213#endif
7214 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7215 break;
7216 case 0xcd: /* int N */
7217 val = ldub_code(s->pc++);
7218#ifdef VBOX
7219 if (s->vm86 && s->iopl != 3 && !s->vme) {
7220#else
7221 if (s->vm86 && s->iopl != 3) {
7222#endif
7223 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7224 } else {
7225 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7226 }
7227 break;
7228 case 0xce: /* into */
7229 if (CODE64(s))
7230 goto illegal_op;
7231 if (s->cc_op != CC_OP_DYNAMIC)
7232 gen_op_set_cc_op(s->cc_op);
7233 gen_jmp_im(pc_start - s->cs_base);
7234 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7235 break;
7236 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7237 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7238#if 1
7239 gen_debug(s, pc_start - s->cs_base);
7240#else
7241 /* start debug */
7242 tb_flush(cpu_single_env);
7243 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7244#endif
7245 break;
7246 case 0xfa: /* cli */
7247 if (!s->vm86) {
7248 if (s->cpl <= s->iopl) {
7249 tcg_gen_helper_0_0(helper_cli);
7250 } else {
7251 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7252 }
7253 } else {
7254 if (s->iopl == 3) {
7255 tcg_gen_helper_0_0(helper_cli);
7256#ifdef VBOX
7257 } else if (s->iopl != 3 && s->vme) {
7258 tcg_gen_helper_0_0(helper_cli_vme);
7259#endif
7260 } else {
7261 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7262 }
7263 }
7264 break;
7265 case 0xfb: /* sti */
7266 if (!s->vm86) {
7267 if (s->cpl <= s->iopl) {
7268 gen_sti:
7269 tcg_gen_helper_0_0(helper_sti);
7270 /* interrupts are enabled only after the first insn following sti */
7271 /* if several sti instructions occur back to back, only the
7272 _first_ one sets the IRQ-inhibit flag */
7273 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7274 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7275 /* give a chance to handle pending irqs */
7276 gen_jmp_im(s->pc - s->cs_base);
7277 gen_eob(s);
7278 } else {
7279 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7280 }
7281 } else {
7282 if (s->iopl == 3) {
7283 goto gen_sti;
7284#ifdef VBOX
7285 } else if (s->iopl != 3 && s->vme) {
7286 tcg_gen_helper_0_0(helper_sti_vme);
7287 /* give a chance to handle pending irqs */
7288 gen_jmp_im(s->pc - s->cs_base);
7289 gen_eob(s);
7290#endif
7291 } else {
7292 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7293 }
7294 }
7295 break;
7296 case 0x62: /* bound */
7297 if (CODE64(s))
7298 goto illegal_op;
7299 ot = dflag ? OT_LONG : OT_WORD;
7300 modrm = ldub_code(s->pc++);
7301 reg = (modrm >> 3) & 7;
7302 mod = (modrm >> 6) & 3;
7303 if (mod == 3)
7304 goto illegal_op;
7305 gen_op_mov_TN_reg(ot, 0, reg);
7306 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7307 gen_jmp_im(pc_start - s->cs_base);
7308 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7309 if (ot == OT_WORD)
7310 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7311 else
7312 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7313 break;
7314 case 0x1c8 ... 0x1cf: /* bswap reg */
7315 reg = (b & 7) | REX_B(s);
7316#ifdef TARGET_X86_64
7317 if (dflag == 2) {
7318 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7319 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7320 gen_op_mov_reg_T0(OT_QUAD, reg);
7321 } else
7322 {
7323 TCGv tmp0;
7324 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7325
7326 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7327 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7328 tcg_gen_bswap_i32(tmp0, tmp0);
7329 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7330 gen_op_mov_reg_T0(OT_LONG, reg);
7331 }
7332#else
7333 {
7334 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7335 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7336 gen_op_mov_reg_T0(OT_LONG, reg);
7337 }
7338#endif
7339 break;
7340 case 0xd6: /* salc */
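     /* undocumented: salc sets AL to 0xff if CF is set and to 0x00
        otherwise, implemented below as AL = -CF */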
7341 if (CODE64(s))
7342 goto illegal_op;
7343 if (s->cc_op != CC_OP_DYNAMIC)
7344 gen_op_set_cc_op(s->cc_op);
7345 gen_compute_eflags_c(cpu_T[0]);
7346 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7347 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7348 break;
7349 case 0xe0: /* loopnz */
7350 case 0xe1: /* loopz */
7351 case 0xe2: /* loop */
7352 case 0xe3: /* jecxz */
7353 {
7354 int l1, l2, l3;
7355
7356 tval = (int8_t)insn_get(s, OT_BYTE);
7357 next_eip = s->pc - s->cs_base;
7358 tval += next_eip;
7359 if (s->dflag == 0)
7360 tval &= 0xffff;
7361
7362 l1 = gen_new_label();
7363 l2 = gen_new_label();
7364 l3 = gen_new_label();
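     /* l1: branch taken, jump to tval; l3: early fall-through for
        loopz/loopnz once ECX reaches zero; l2: common exit after the
        not-taken path (next_eip). */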
7365 b &= 3;
7366 switch(b) {
7367 case 0: /* loopnz */
7368 case 1: /* loopz */
7369 if (s->cc_op != CC_OP_DYNAMIC)
7370 gen_op_set_cc_op(s->cc_op);
7371 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7372 gen_op_jz_ecx(s->aflag, l3);
7373 gen_compute_eflags(cpu_tmp0);
7374 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7375 if (b == 0) {
7376 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7377 } else {
7378 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7379 }
7380 break;
7381 case 2: /* loop */
7382 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7383 gen_op_jnz_ecx(s->aflag, l1);
7384 break;
7385 default:
7386 case 3: /* jcxz */
7387 gen_op_jz_ecx(s->aflag, l1);
7388 break;
7389 }
7390
7391 gen_set_label(l3);
7392 gen_jmp_im(next_eip);
7393 tcg_gen_br(l2);
7394
7395 gen_set_label(l1);
7396 gen_jmp_im(tval);
7397 gen_set_label(l2);
7398 gen_eob(s);
7399 }
7400 break;
7401 case 0x130: /* wrmsr */
7402 case 0x132: /* rdmsr */
7403 if (s->cpl != 0) {
7404 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7405 } else {
7406 if (s->cc_op != CC_OP_DYNAMIC)
7407 gen_op_set_cc_op(s->cc_op);
7408 gen_jmp_im(pc_start - s->cs_base);
7409 if (b & 2) {
7410 tcg_gen_helper_0_0(helper_rdmsr);
7411 } else {
7412 tcg_gen_helper_0_0(helper_wrmsr);
7413 }
7414 }
7415 break;
7416 case 0x131: /* rdtsc */
7417 if (s->cc_op != CC_OP_DYNAMIC)
7418 gen_op_set_cc_op(s->cc_op);
7419 gen_jmp_im(pc_start - s->cs_base);
7420 if (use_icount)
7421 gen_io_start();
7422 tcg_gen_helper_0_0(helper_rdtsc);
7423 if (use_icount) {
7424 gen_io_end();
7425 gen_jmp(s, s->pc - s->cs_base);
7426 }
7427 break;
7428 case 0x133: /* rdpmc */
7429 if (s->cc_op != CC_OP_DYNAMIC)
7430 gen_op_set_cc_op(s->cc_op);
7431 gen_jmp_im(pc_start - s->cs_base);
7432 tcg_gen_helper_0_0(helper_rdpmc);
7433 break;
7434 case 0x134: /* sysenter */
7435#ifndef VBOX
7436 /* Intel CPUs allow SYSENTER in 64-bit mode; AMD does not */
7437 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7438#else
7439 /** @todo: make things right */
7440 if (CODE64(s))
7441#endif
7442 goto illegal_op;
7443 if (!s->pe) {
7444 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7445 } else {
7446 if (s->cc_op != CC_OP_DYNAMIC) {
7447 gen_op_set_cc_op(s->cc_op);
7448 s->cc_op = CC_OP_DYNAMIC;
7449 }
7450 gen_jmp_im(pc_start - s->cs_base);
7451 tcg_gen_helper_0_0(helper_sysenter);
7452 gen_eob(s);
7453 }
7454 break;
7455 case 0x135: /* sysexit */
7456#ifndef VBOX
7457 /* Intel CPUs allow SYSEXIT in 64-bit mode; AMD does not */
7458 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7459#else
7460 /** @todo: make things right */
7461 if (CODE64(s))
7462#endif
7463 goto illegal_op;
7464 if (!s->pe) {
7465 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7466 } else {
7467 if (s->cc_op != CC_OP_DYNAMIC) {
7468 gen_op_set_cc_op(s->cc_op);
7469 s->cc_op = CC_OP_DYNAMIC;
7470 }
7471 gen_jmp_im(pc_start - s->cs_base);
7472 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7473 gen_eob(s);
7474 }
7475 break;
7476#ifdef TARGET_X86_64
7477 case 0x105: /* syscall */
7478 /* XXX: is it usable in real mode ? */
7479 if (s->cc_op != CC_OP_DYNAMIC) {
7480 gen_op_set_cc_op(s->cc_op);
7481 s->cc_op = CC_OP_DYNAMIC;
7482 }
7483 gen_jmp_im(pc_start - s->cs_base);
7484 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7485 gen_eob(s);
7486 break;
7487 case 0x107: /* sysret */
7488 if (!s->pe) {
7489 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7490 } else {
7491 if (s->cc_op != CC_OP_DYNAMIC) {
7492 gen_op_set_cc_op(s->cc_op);
7493 s->cc_op = CC_OP_DYNAMIC;
7494 }
7495 gen_jmp_im(pc_start - s->cs_base);
7496 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7497 /* condition codes are modified only in long mode */
7498 if (s->lma)
7499 s->cc_op = CC_OP_EFLAGS;
7500 gen_eob(s);
7501 }
7502 break;
7503#endif
7504 case 0x1a2: /* cpuid */
7505 if (s->cc_op != CC_OP_DYNAMIC)
7506 gen_op_set_cc_op(s->cc_op);
7507 gen_jmp_im(pc_start - s->cs_base);
7508 tcg_gen_helper_0_0(helper_cpuid);
7509 break;
7510 case 0xf4: /* hlt */
7511 if (s->cpl != 0) {
7512 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7513 } else {
7514 if (s->cc_op != CC_OP_DYNAMIC)
7515 gen_op_set_cc_op(s->cc_op);
7516 gen_jmp_im(pc_start - s->cs_base);
7517 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7518 s->is_jmp = 3;
7519 }
7520 break;
7521 case 0x100:
7522 modrm = ldub_code(s->pc++);
7523 mod = (modrm >> 6) & 3;
7524 op = (modrm >> 3) & 7;
7525 switch(op) {
7526 case 0: /* sldt */
7527 if (!s->pe || s->vm86)
7528 goto illegal_op;
7529 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7530 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7531 ot = OT_WORD;
7532 if (mod == 3)
7533 ot += s->dflag;
7534 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7535 break;
7536 case 2: /* lldt */
7537 if (!s->pe || s->vm86)
7538 goto illegal_op;
7539 if (s->cpl != 0) {
7540 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7541 } else {
7542 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7543 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7544 gen_jmp_im(pc_start - s->cs_base);
7545 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7546 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7547 }
7548 break;
7549 case 1: /* str */
7550 if (!s->pe || s->vm86)
7551 goto illegal_op;
7552 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7553 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7554 ot = OT_WORD;
7555 if (mod == 3)
7556 ot += s->dflag;
7557 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7558 break;
7559 case 3: /* ltr */
7560 if (!s->pe || s->vm86)
7561 goto illegal_op;
7562 if (s->cpl != 0) {
7563 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7564 } else {
7565 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7566 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7567 gen_jmp_im(pc_start - s->cs_base);
7568 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7569 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7570 }
7571 break;
7572 case 4: /* verr */
7573 case 5: /* verw */
7574 if (!s->pe || s->vm86)
7575 goto illegal_op;
7576 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7577 if (s->cc_op != CC_OP_DYNAMIC)
7578 gen_op_set_cc_op(s->cc_op);
7579 if (op == 4)
7580 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7581 else
7582 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7583 s->cc_op = CC_OP_EFLAGS;
7584 break;
7585 default:
7586 goto illegal_op;
7587 }
7588 break;
7589 case 0x101:
7590 modrm = ldub_code(s->pc++);
7591 mod = (modrm >> 6) & 3;
7592 op = (modrm >> 3) & 7;
7593 rm = modrm & 7;
7594 switch(op) {
7595 case 0: /* sgdt */
7596 if (mod == 3)
7597 goto illegal_op;
7598 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7599 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7600 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7601 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7602 gen_add_A0_im(s, 2);
7603 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7604 if (!s->dflag)
7605 gen_op_andl_T0_im(0xffffff);
7606 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7607 break;
7608 case 1:
7609 if (mod == 3) {
7610 switch (rm) {
7611 case 0: /* monitor */
7612 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7613 s->cpl != 0)
7614 goto illegal_op;
7615 if (s->cc_op != CC_OP_DYNAMIC)
7616 gen_op_set_cc_op(s->cc_op);
7617 gen_jmp_im(pc_start - s->cs_base);
7618#ifdef TARGET_X86_64
7619 if (s->aflag == 2) {
7620 gen_op_movq_A0_reg(R_EAX);
7621 } else
7622#endif
7623 {
7624 gen_op_movl_A0_reg(R_EAX);
7625 if (s->aflag == 0)
7626 gen_op_andl_A0_ffff();
7627 }
7628 gen_add_A0_ds_seg(s);
7629 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7630 break;
7631 case 1: /* mwait */
7632 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7633 s->cpl != 0)
7634 goto illegal_op;
7635 if (s->cc_op != CC_OP_DYNAMIC) {
7636 gen_op_set_cc_op(s->cc_op);
7637 s->cc_op = CC_OP_DYNAMIC;
7638 }
7639 gen_jmp_im(pc_start - s->cs_base);
7640 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7641 gen_eob(s);
7642 break;
7643 default:
7644 goto illegal_op;
7645 }
7646 } else { /* sidt */
7647 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7648 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7649 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7650 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7651 gen_add_A0_im(s, 2);
7652 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7653 if (!s->dflag)
7654 gen_op_andl_T0_im(0xffffff);
7655 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7656 }
7657 break;
7658 case 2: /* lgdt */
7659 case 3: /* lidt */
7660 if (mod == 3) {
7661 if (s->cc_op != CC_OP_DYNAMIC)
7662 gen_op_set_cc_op(s->cc_op);
7663 gen_jmp_im(pc_start - s->cs_base);
7664 switch(rm) {
7665 case 0: /* VMRUN */
7666 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7667 goto illegal_op;
7668 if (s->cpl != 0) {
7669 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7670 break;
7671 } else {
7672 tcg_gen_helper_0_2(helper_vmrun,
7673 tcg_const_i32(s->aflag),
7674 tcg_const_i32(s->pc - pc_start));
7675 tcg_gen_exit_tb(0);
7676 s->is_jmp = 3;
7677 }
7678 break;
7679 case 1: /* VMMCALL */
7680 if (!(s->flags & HF_SVME_MASK))
7681 goto illegal_op;
7682 tcg_gen_helper_0_0(helper_vmmcall);
7683 break;
7684 case 2: /* VMLOAD */
7685 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7686 goto illegal_op;
7687 if (s->cpl != 0) {
7688 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7689 break;
7690 } else {
7691 tcg_gen_helper_0_1(helper_vmload,
7692 tcg_const_i32(s->aflag));
7693 }
7694 break;
7695 case 3: /* VMSAVE */
7696 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7697 goto illegal_op;
7698 if (s->cpl != 0) {
7699 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7700 break;
7701 } else {
7702 tcg_gen_helper_0_1(helper_vmsave,
7703 tcg_const_i32(s->aflag));
7704 }
7705 break;
7706 case 4: /* STGI */
7707 if ((!(s->flags & HF_SVME_MASK) &&
7708 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7709 !s->pe)
7710 goto illegal_op;
7711 if (s->cpl != 0) {
7712 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7713 break;
7714 } else {
7715 tcg_gen_helper_0_0(helper_stgi);
7716 }
7717 break;
7718 case 5: /* CLGI */
7719 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7720 goto illegal_op;
7721 if (s->cpl != 0) {
7722 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7723 break;
7724 } else {
7725 tcg_gen_helper_0_0(helper_clgi);
7726 }
7727 break;
7728 case 6: /* SKINIT */
7729 if ((!(s->flags & HF_SVME_MASK) &&
7730 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7731 !s->pe)
7732 goto illegal_op;
7733 tcg_gen_helper_0_0(helper_skinit);
7734 break;
7735 case 7: /* INVLPGA */
7736 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7737 goto illegal_op;
7738 if (s->cpl != 0) {
7739 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7740 break;
7741 } else {
7742 tcg_gen_helper_0_1(helper_invlpga,
7743 tcg_const_i32(s->aflag));
7744 }
7745 break;
7746 default:
7747 goto illegal_op;
7748 }
7749 } else if (s->cpl != 0) {
7750 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7751 } else {
7752 gen_svm_check_intercept(s, pc_start,
7753 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7754 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7755 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7756 gen_add_A0_im(s, 2);
7757 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7758 if (!s->dflag)
7759 gen_op_andl_T0_im(0xffffff);
7760 if (op == 2) {
7761 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7762 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7763 } else {
7764 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7765 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7766 }
7767 }
7768 break;
7769 case 4: /* smsw */
7770 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7771 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7772 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7773 break;
7774 case 6: /* lmsw */
7775 if (s->cpl != 0) {
7776 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7777 } else {
7778 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7779 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7780 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7781 gen_jmp_im(s->pc - s->cs_base);
7782 gen_eob(s);
7783 }
7784 break;
7785 case 7: /* invlpg */
7786 if (s->cpl != 0) {
7787 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7788 } else {
7789 if (mod == 3) {
7790#ifdef TARGET_X86_64
7791 if (CODE64(s) && rm == 0) {
7792 /* swapgs */
7793 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7794 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7795 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7796 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7797 } else
7798#endif
7799 {
7800 goto illegal_op;
7801 }
7802 } else {
7803 if (s->cc_op != CC_OP_DYNAMIC)
7804 gen_op_set_cc_op(s->cc_op);
7805 gen_jmp_im(pc_start - s->cs_base);
7806 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7807 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7808 gen_jmp_im(s->pc - s->cs_base);
7809 gen_eob(s);
7810 }
7811 }
7812 break;
7813 default:
7814 goto illegal_op;
7815 }
7816 break;
7817 case 0x108: /* invd */
7818 case 0x109: /* wbinvd */
7819 if (s->cpl != 0) {
7820 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7821 } else {
7822 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7823 /* nothing to do */
7824 }
7825 break;
7826 case 0x63: /* arpl or movslS (x86_64) */
7827#ifdef TARGET_X86_64
7828 if (CODE64(s)) {
7829 int d_ot;
7830 /* d_ot is the size of the destination operand */
7831 d_ot = dflag + OT_WORD;
7832
7833 modrm = ldub_code(s->pc++);
7834 reg = ((modrm >> 3) & 7) | rex_r;
7835 mod = (modrm >> 6) & 3;
7836 rm = (modrm & 7) | REX_B(s);
7837
7838 if (mod == 3) {
7839 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7840 /* sign extend */
7841 if (d_ot == OT_QUAD)
7842 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7843 gen_op_mov_reg_T0(d_ot, reg);
7844 } else {
7845 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7846 if (d_ot == OT_QUAD) {
7847 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7848 } else {
7849 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7850 }
7851 gen_op_mov_reg_T0(d_ot, reg);
7852 }
7853 } else
7854#endif
7855 {
7856 int label1;
7857 TCGv t0, t1, t2;
7858
7859 if (!s->pe || s->vm86)
7860 goto illegal_op;
7861 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7862 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7863 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7864 ot = OT_WORD;
7865 modrm = ldub_code(s->pc++);
7866 reg = (modrm >> 3) & 7;
7867 mod = (modrm >> 6) & 3;
7868 rm = modrm & 7;
7869#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
7870 /** @todo: how to do that right? */
7871 //gen_op_mov_TN_reg[ot][1][reg]();
7872#endif
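     /* arpl: if dest.RPL < src.RPL, raise dest.RPL to src.RPL and set
        ZF, otherwise clear ZF. t2 holds the new ZF value (0 or CC_Z)
        and is merged into the recomputed eflags below. */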
7873 if (mod != 3) {
7874 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7875 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7876 } else {
7877 gen_op_mov_v_reg(ot, t0, rm);
7878 }
7879 gen_op_mov_v_reg(ot, t1, reg);
7880 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7881 tcg_gen_andi_tl(t1, t1, 3);
7882 tcg_gen_movi_tl(t2, 0);
7883 label1 = gen_new_label();
7884 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7885 tcg_gen_andi_tl(t0, t0, ~3);
7886 tcg_gen_or_tl(t0, t0, t1);
7887 tcg_gen_movi_tl(t2, CC_Z);
7888 gen_set_label(label1);
7889 if (mod != 3) {
7890 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7891 } else {
7892 gen_op_mov_reg_v(ot, rm, t0);
7893 }
7894 if (s->cc_op != CC_OP_DYNAMIC)
7895 gen_op_set_cc_op(s->cc_op);
7896 gen_compute_eflags(cpu_cc_src);
7897 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7898 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7899 s->cc_op = CC_OP_EFLAGS;
7900 tcg_temp_free(t0);
7901 tcg_temp_free(t1);
7902 tcg_temp_free(t2);
7903 }
7904 break;
7905 case 0x102: /* lar */
7906 case 0x103: /* lsl */
7907 {
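     /* helper_lar/helper_lsl set CC_Z in cc_src when the selector
        passes the access checks; the conditional branch below skips
        the register write-back when ZF is clear, leaving the
        destination unchanged as the architecture requires. */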
7908 int label1;
7909 TCGv t0;
7910 if (!s->pe || s->vm86)
7911 goto illegal_op;
7912 ot = dflag ? OT_LONG : OT_WORD;
7913 modrm = ldub_code(s->pc++);
7914 reg = ((modrm >> 3) & 7) | rex_r;
7915 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7916 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7917 if (s->cc_op != CC_OP_DYNAMIC)
7918 gen_op_set_cc_op(s->cc_op);
7919 if (b == 0x102)
7920 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
7921 else
7922 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
7923 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7924 label1 = gen_new_label();
7925 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7926 gen_op_mov_reg_v(ot, reg, t0);
7927 gen_set_label(label1);
7928 s->cc_op = CC_OP_EFLAGS;
7929 tcg_temp_free(t0);
7930 }
7931 break;
7932 case 0x118:
7933 modrm = ldub_code(s->pc++);
7934 mod = (modrm >> 6) & 3;
7935 op = (modrm >> 3) & 7;
7936 switch(op) {
7937 case 0: /* prefetchnta */
7938 case 1: /* prefetcht0 */
7939 case 2: /* prefetcht1 */
7940 case 3: /* prefetcht2 */
7941 if (mod == 3)
7942 goto illegal_op;
7943 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7944 /* nothing more to do */
7945 break;
7946 default: /* nop (multi byte) */
7947 gen_nop_modrm(s, modrm);
7948 break;
7949 }
7950 break;
7951 case 0x119 ... 0x11f: /* nop (multi byte) */
7952 modrm = ldub_code(s->pc++);
7953 gen_nop_modrm(s, modrm);
7954 break;
7955 case 0x120: /* mov reg, crN */
7956 case 0x122: /* mov crN, reg */
7957 if (s->cpl != 0) {
7958 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7959 } else {
7960 modrm = ldub_code(s->pc++);
7961 if ((modrm & 0xc0) != 0xc0)
7962 goto illegal_op;
7963 rm = (modrm & 7) | REX_B(s);
7964 reg = ((modrm >> 3) & 7) | rex_r;
7965 if (CODE64(s))
7966 ot = OT_QUAD;
7967 else
7968 ot = OT_LONG;
7969 switch(reg) {
7970 case 0:
7971 case 2:
7972 case 3:
7973 case 4:
7974 case 8:
7975 if (s->cc_op != CC_OP_DYNAMIC)
7976 gen_op_set_cc_op(s->cc_op);
7977 gen_jmp_im(pc_start - s->cs_base);
7978 if (b & 2) {
7979 gen_op_mov_TN_reg(ot, 0, rm);
7980 tcg_gen_helper_0_2(helper_write_crN,
7981 tcg_const_i32(reg), cpu_T[0]);
7982 gen_jmp_im(s->pc - s->cs_base);
7983 gen_eob(s);
7984 } else {
7985 tcg_gen_helper_1_1(helper_read_crN,
7986 cpu_T[0], tcg_const_i32(reg));
7987 gen_op_mov_reg_T0(ot, rm);
7988 }
7989 break;
7990 default:
7991 goto illegal_op;
7992 }
7993 }
7994 break;
7995 case 0x121: /* mov reg, drN */
7996 case 0x123: /* mov drN, reg */
7997 if (s->cpl != 0) {
7998 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7999 } else {
8000 modrm = ldub_code(s->pc++);
8001 if ((modrm & 0xc0) != 0xc0)
8002 goto illegal_op;
8003 rm = (modrm & 7) | REX_B(s);
8004 reg = ((modrm >> 3) & 7) | rex_r;
8005 if (CODE64(s))
8006 ot = OT_QUAD;
8007 else
8008 ot = OT_LONG;
8009 /* XXX: do it dynamically with CR4.DE bit */
8010 if (reg == 4 || reg == 5 || reg >= 8)
8011 goto illegal_op;
8012 if (b & 2) {
8013 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8014 gen_op_mov_TN_reg(ot, 0, rm);
8015 tcg_gen_helper_0_2(helper_movl_drN_T0,
8016 tcg_const_i32(reg), cpu_T[0]);
8017 gen_jmp_im(s->pc - s->cs_base);
8018 gen_eob(s);
8019 } else {
8020 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8021 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8022 gen_op_mov_reg_T0(ot, rm);
8023 }
8024 }
8025 break;
8026 case 0x106: /* clts */
8027 if (s->cpl != 0) {
8028 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8029 } else {
8030 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8031 tcg_gen_helper_0_0(helper_clts);
8032 /* abort block because static cpu state changed */
8033 gen_jmp_im(s->pc - s->cs_base);
8034 gen_eob(s);
8035 }
8036 break;
8037 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8038 case 0x1c3: /* MOVNTI reg, mem */
8039 if (!(s->cpuid_features & CPUID_SSE2))
8040 goto illegal_op;
8041 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8042 modrm = ldub_code(s->pc++);
8043 mod = (modrm >> 6) & 3;
8044 if (mod == 3)
8045 goto illegal_op;
8046 reg = ((modrm >> 3) & 7) | rex_r;
8047 /* generate a generic store */
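 /* MOVNTI's non-temporal hint only affects cache behaviour, so a
 plain store is a faithful emulation */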
8048 gen_ldst_modrm(s, modrm, ot, reg, 1);
8049 break;
8050 case 0x1ae:
8051 modrm = ldub_code(s->pc++);
8052 mod = (modrm >> 6) & 3;
8053 op = (modrm >> 3) & 7;
8054 switch(op) {
8055 case 0: /* fxsave */
8056 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8057 (s->flags & HF_EM_MASK))
8058 goto illegal_op;
8059 if (s->flags & HF_TS_MASK) {
8060 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8061 break;
8062 }
8063 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8064 if (s->cc_op != CC_OP_DYNAMIC)
8065 gen_op_set_cc_op(s->cc_op);
8066 gen_jmp_im(pc_start - s->cs_base);
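 /* the second argument tells the helper whether the 64-bit (REX.W)
 FXSAVE layout must be used */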
8067 tcg_gen_helper_0_2(helper_fxsave,
8068 cpu_A0, tcg_const_i32((s->dflag == 2)));
8069 break;
8070 case 1: /* fxrstor */
8071 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8072 (s->flags & HF_EM_MASK))
8073 goto illegal_op;
8074 if (s->flags & HF_TS_MASK) {
8075 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8076 break;
8077 }
8078 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8079 if (s->cc_op != CC_OP_DYNAMIC)
8080 gen_op_set_cc_op(s->cc_op);
8081 gen_jmp_im(pc_start - s->cs_base);
8082 tcg_gen_helper_0_2(helper_fxrstor,
8083 cpu_A0, tcg_const_i32((s->dflag == 2)));
8084 break;
8085 case 2: /* ldmxcsr */
8086 case 3: /* stmxcsr */
8087 if (s->flags & HF_TS_MASK) {
8088 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8089 break;
8090 }
8091 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8092 mod == 3)
8093 goto illegal_op;
8094 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8095 if (op == 2) {
8096 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8097 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8098 } else {
8099 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8100 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8101 }
8102 break;
8103 case 5: /* lfence */
8104 case 6: /* mfence */
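 /* the fences take no memory operand: mod must be 11 and rm 000,
 e.g. 0F AE E8 = lfence, 0F AE F0 = mfence */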
8105 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8106 goto illegal_op;
8107 break;
8108 case 7: /* sfence / clflush */
8109 if ((modrm & 0xc7) == 0xc0) {
8110 /* sfence */
8111 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8112 if (!(s->cpuid_features & CPUID_SSE))
8113 goto illegal_op;
8114 } else {
8115 /* clflush */
8116 if (!(s->cpuid_features & CPUID_CLFLUSH))
8117 goto illegal_op;
8118 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8119 }
8120 break;
8121 default:
8122 goto illegal_op;
8123 }
8124 break;
8125 case 0x10d: /* 3DNow! prefetch(w) */
8126 modrm = ldub_code(s->pc++);
8127 mod = (modrm >> 6) & 3;
8128 if (mod == 3)
8129 goto illegal_op;
8130 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8131 /* ignore for now */
8132 break;
8133 case 0x1aa: /* rsm */
8134 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8135 if (!(s->flags & HF_SMM_MASK))
8136 goto illegal_op;
8137 if (s->cc_op != CC_OP_DYNAMIC) {
8138 gen_op_set_cc_op(s->cc_op);
8139 s->cc_op = CC_OP_DYNAMIC;
8140 }
8141 gen_jmp_im(s->pc - s->cs_base);
8142 tcg_gen_helper_0_0(helper_rsm);
8143 gen_eob(s);
8144 break;
8145 case 0x1b8: /* SSE4.2 popcnt */
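 /* POPCNT is encoded F3 0F B8 /r (e.g. F3 0F B8 C1 = popcnt eax, ecx);
 the F3 (REPZ) prefix is mandatory, LOCK and F2 make it invalid */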
8146 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8147 PREFIX_REPZ)
8148 goto illegal_op;
8149 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8150 goto illegal_op;
8151
8152 modrm = ldub_code(s->pc++);
8153 reg = ((modrm >> 3) & 7);
8154
8155 if (s->prefix & PREFIX_DATA)
8156 ot = OT_WORD;
8157 else if (s->dflag != 2)
8158 ot = OT_LONG;
8159 else
8160 ot = OT_QUAD;
8161
8162 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8163 tcg_gen_helper_1_2(helper_popcnt,
8164 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8165 gen_op_mov_reg_T0(ot, reg);
8166
8167 s->cc_op = CC_OP_EFLAGS;
8168 break;
8169 case 0x10e ... 0x10f:
8170 /* 3DNow! instructions, ignore prefixes */
8171 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
8172 case 0x110 ... 0x117:
8173 case 0x128 ... 0x12f:
8174 case 0x138 ... 0x13a:
8175 case 0x150 ... 0x177:
8176 case 0x17c ... 0x17f:
8177 case 0x1c2:
8178 case 0x1c4 ... 0x1c6:
8179 case 0x1d0 ... 0x1fe:
8180 gen_sse(s, b, pc_start, rex_r);
8181 break;
8182 default:
8183 goto illegal_op;
8184 }
8185 /* lock generation */
8186 if (s->prefix & PREFIX_LOCK)
8187 tcg_gen_helper_0_0(helper_unlock);
8188 return s->pc;
8189 illegal_op:
8190 if (s->prefix & PREFIX_LOCK)
8191 tcg_gen_helper_0_0(helper_unlock);
8192 /* XXX: ensure that no lock was generated */
8193 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8194 return s->pc;
8195}
8196
8197void optimize_flags_init(void)
8198{
8199#ifndef VBOX
8200#if TCG_TARGET_REG_BITS == 32
8201 assert(sizeof(CCTable) == (1 << 3));
8202#else
8203 assert(sizeof(CCTable) == (1 << 4));
8204#endif
8205#endif
8206 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8207 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8208 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8209 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8210 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8211 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8212 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8213 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8214 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8215
8216 /* register helpers */
8217
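/* redefining DEF_HELPER before re-including helper.h turns every helper
 prototype into a tcg_register_helper() call */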
8218#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8219#include "helper.h"
8220}
8221
8222/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8223 basic block 'tb'. If search_pc is TRUE, also generate PC
8224 information for each intermediate instruction. */
8225#ifndef VBOX
8226static inline void gen_intermediate_code_internal(CPUState *env,
8227#else /* VBOX */
8228DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8229#endif /* VBOX */
8230 TranslationBlock *tb,
8231 int search_pc)
8232{
8233 DisasContext dc1, *dc = &dc1;
8234 target_ulong pc_ptr;
8235 uint16_t *gen_opc_end;
8236 int j, lj, cflags;
8237 uint64_t flags;
8238 target_ulong pc_start;
8239 target_ulong cs_base;
8240 int num_insns;
8241 int max_insns;
8242
8243 /* generate intermediate code */
8244 pc_start = tb->pc;
8245 cs_base = tb->cs_base;
8246 flags = tb->flags;
8247 cflags = tb->cflags;
8248
8249 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8250 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8251 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8252 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8253 dc->f_st = 0;
8254 dc->vm86 = (flags >> VM_SHIFT) & 1;
8255#ifdef VBOX_WITH_CALL_RECORD
8256 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8257 if ( !(env->state & CPU_RAW_RING0)
8258 && (env->cr[0] & CR0_PG_MASK)
8259 && !(env->eflags & X86_EFL_IF)
8260 && dc->code32)
8261 dc->record_call = 1;
8262 else
8263 dc->record_call = 0;
8264#endif
8265 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8266 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8267 dc->tf = (flags >> TF_SHIFT) & 1;
8268 dc->singlestep_enabled = env->singlestep_enabled;
8269 dc->cc_op = CC_OP_DYNAMIC;
8270 dc->cs_base = cs_base;
8271 dc->tb = tb;
8272 dc->popl_esp_hack = 0;
8273 /* select memory access functions */
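 /* the stride of 4 matches the four operand sizes of each access
 variant (0 = raw, 4 = kernel, 8 = user) */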
8274 dc->mem_index = 0;
8275 if (flags & HF_SOFTMMU_MASK) {
8276 if (dc->cpl == 3)
8277 dc->mem_index = 2 * 4;
8278 else
8279 dc->mem_index = 1 * 4;
8280 }
8281 dc->cpuid_features = env->cpuid_features;
8282 dc->cpuid_ext_features = env->cpuid_ext_features;
8283 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8284 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8285#ifdef TARGET_X86_64
8286 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8287 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8288#endif
8289 dc->flags = flags;
8290 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8291 (flags & HF_INHIBIT_IRQ_MASK)
8292#ifndef CONFIG_SOFTMMU
8293 || (flags & HF_SOFTMMU_MASK)
8294#endif
8295 );
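 /* jmp_opt enables direct TB chaining; it must stay off while
 single-stepping or while IRQs are inhibited so every jump ends the TB */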
8296#if 0
8297 /* check addseg logic */
8298 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8299 printf("ERROR addseg\n");
8300#endif
8301
8302 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8303 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8304 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8305 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8306
8307 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8308 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8309 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8310 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8311 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8312 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8313 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8314 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8315 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8316
8317 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8318
8319 dc->is_jmp = DISAS_NEXT;
8320 pc_ptr = pc_start;
8321 lj = -1;
8322 num_insns = 0;
8323 max_insns = tb->cflags & CF_COUNT_MASK;
8324 if (max_insns == 0)
8325 max_insns = CF_COUNT_MASK;
8326
8327 gen_icount_start();
8328 for(;;) {
8329 if (env->nb_breakpoints > 0) {
8330 for(j = 0; j < env->nb_breakpoints; j++) {
8331 if (env->breakpoints[j] == pc_ptr) {
8332 gen_debug(dc, pc_ptr - dc->cs_base);
8333 break;
8334 }
8335 }
8336 }
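 /* record the guest PC, cc_op state and instruction count of each op
 so gen_pc_load() can later restore the CPU state from a host PC */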
8337 if (search_pc) {
8338 j = gen_opc_ptr - gen_opc_buf;
8339 if (lj < j) {
8340 lj++;
8341 while (lj < j)
8342 gen_opc_instr_start[lj++] = 0;
8343 }
8344 gen_opc_pc[lj] = pc_ptr;
8345 gen_opc_cc_op[lj] = dc->cc_op;
8346 gen_opc_instr_start[lj] = 1;
8347 gen_opc_icount[lj] = num_insns;
8348 }
8349 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8350 gen_io_start();
8351
8352 pc_ptr = disas_insn(dc, pc_ptr);
8353 num_insns++;
8354 /* stop translation if indicated */
8355 if (dc->is_jmp)
8356 break;
8357#ifdef VBOX
8358#ifdef DEBUG
8359/*
8360 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8361 {
8362 //should never happen as the jump to the patch code terminates the translation block
8363 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8364 }
8365*/
8366#endif
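 /* VBox requested emulation of a single instruction: clear the request
 and terminate the translation block after this one insn */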
8367 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8368 {
8369 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8370 gen_jmp_im(pc_ptr - dc->cs_base);
8371 gen_eob(dc);
8372 break;
8373 }
8374#endif /* VBOX */
8375
8376 /* in single-step mode we generate only one instruction and
8377 then raise an exception */
8378 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8379 the flag and abort the translation to give the irqs a
8380 chance to happen */
8381 if (dc->tf || dc->singlestep_enabled ||
8382 (flags & HF_INHIBIT_IRQ_MASK)) {
8383 gen_jmp_im(pc_ptr - dc->cs_base);
8384 gen_eob(dc);
8385 break;
8386 }
8387 /* likewise stop if the translation grows too long: the opcode buffer is nearly full, the TB would cross a page boundary, or the instruction limit is reached */
8388 if (gen_opc_ptr >= gen_opc_end ||
8389 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8390 num_insns >= max_insns) {
8391 gen_jmp_im(pc_ptr - dc->cs_base);
8392 gen_eob(dc);
8393 break;
8394 }
8395 }
8396 if (tb->cflags & CF_LAST_IO)
8397 gen_io_end();
8398 gen_icount_end(tb, num_insns);
8399 *gen_opc_ptr = INDEX_op_end;
8400 /* don't forget to fill in the last values */
8401 if (search_pc) {
8402 j = gen_opc_ptr - gen_opc_buf;
8403 lj++;
8404 while (lj <= j)
8405 gen_opc_instr_start[lj++] = 0;
8406 }
8407
8408#ifdef DEBUG_DISAS
8409 if (loglevel & CPU_LOG_TB_CPU) {
8410 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8411 }
8412 if (loglevel & CPU_LOG_TB_IN_ASM) {
8413 int disas_flags;
8414 fprintf(logfile, "----------------\n");
8415 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8416#ifdef TARGET_X86_64
8417 if (dc->code64)
8418 disas_flags = 2;
8419 else
8420#endif
8421 disas_flags = !dc->code32;
8422 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8423 fprintf(logfile, "\n");
8424 }
8425#endif
8426
8427 if (!search_pc) {
8428 tb->size = pc_ptr - pc_start;
8429 tb->icount = num_insns;
8430 }
8431}
8432
8433void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8434{
8435 gen_intermediate_code_internal(env, tb, 0);
8436}
8437
8438void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8439{
8440 gen_intermediate_code_internal(env, tb, 1);
8441}
8442
8443void gen_pc_load(CPUState *env, TranslationBlock *tb,
8444 unsigned long searched_pc, int pc_pos, void *puc)
8445{
8446 int cc_op;
8447#ifdef DEBUG_DISAS
8448 if (loglevel & CPU_LOG_TB_OP) {
8449 int i;
8450 fprintf(logfile, "RESTORE:\n");
8451 for(i = 0;i <= pc_pos; i++) {
8452 if (gen_opc_instr_start[i]) {
8453 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8454 }
8455 }
8456 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8457 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8458 (uint32_t)tb->cs_base);
8459 }
8460#endif
8461 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8462 cc_op = gen_opc_cc_op[pc_pos];
8463 if (cc_op != CC_OP_DYNAMIC)
8464 env->cc_op = cc_op;
8465}