VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@13755

Last change on this file since 13755 was 13672, checked in by vboxsync, 16 years ago

improved external events check performance, synced a couple of VBOX-specific
recompiler checks

  • Property svn:eol-style set to native
File size: 273.3 KB
 
/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
 * a choice of LGPL license versions is made available with the language indicating
 * that LGPLv2 or any later version may be used, or where a choice of which version
 * of the LGPL is applied is otherwise unspecified.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifndef VBOX
#include <inttypes.h>
#include <signal.h>
#include <assert.h>
#endif /* !VBOX */

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef VBOX

/* Special/override code readers to hide patched code. */

uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */
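
/* Explanatory note: lduw_code_raw/ldl_code_raw deliberately go through the
   byte reader so that remR3GetOpcode() can substitute the original bytes of
   patched code one byte at a time; the shifts then reassemble the value in
   little-endian order, matching what a direct lduw/ldl would read from
   unpatched guest memory. */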


typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

#ifdef VBOX
static void gen_check_external_event(void);
#endif

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
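
/* Explanatory note: OT_* values are the log2 of the operand size in bytes,
   so expressions such as "8 << ot" (bit width) and "ot + s->mem_index"
   (packed size + memory-index cookie for the load/store helpers below)
   work arithmetically; e.g. a 32-bit load is issued as
   gen_op_ld_T0_A0(OT_LONG + s->mem_index). */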

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
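
/* Explanatory note: the OT_BYTE case mirrors x86 register encoding. Without
   a REX prefix, byte-register numbers 4-7 select the high byte registers
   AH/CH/DH/BH, i.e. byte 1 of regs[reg - 4]; with REX (x86_64_hregs set)
   every encoding selects a low byte. The OT_LONG store in the
   TARGET_X86_64 build also clears the upper 32 bits, matching the
   architectural zero-extension of 32-bit results in long mode. */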

#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
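
/* Explanatory note: the idx argument packs two fields: bits 0-1 hold the
   OT_* operand size and the remaining bits hold a memory-index cookie, so
   (idx >> 2) - 1 recovers the softmmu mem_index. Callers build it as
   "ot + s->mem_index", with s->mem_index pre-scaled elsewhere so the two
   fields cannot collide. */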

/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
static void gen_check_external_event(void)
{
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    /* t0 = tcg_temp_local_new(TCG_TYPE_TL); */
    t0 = cpu_tmp0;

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict branch as taken */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    /* tcg_temp_free(t0); */

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
}
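
/* Explanatory note: this is the VBOX fast path behind the commit message
   above ("improved external events check performance"): the generated code
   itself tests env->interrupt_request against the external-event mask and
   only calls out to helper_check_external_event() when one of the bits is
   set, so the common no-event case costs a load, an AND and a not-taken
   branch instead of an unconditional helper call. */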

#ifndef VBOX
static inline void gen_update_eip(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_update_eip(target_ulong pc)
#endif /* VBOX */
{
    gen_jmp_im(pc);

}
#endif

#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

#ifndef VBOX
static inline void gen_op_movl_T0_Dshift(int ot)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
#endif /* VBOX */
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
};
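
/* Explanatory note: env->df is kept as +1 or -1 rather than as the raw
   EFLAGS.DF bit, so shifting it left by ot yields the signed element size
   (+/-1, +/-2, +/-4, +/-8). The string helpers below load it into T0 and
   add T0 to ESI/EDI, advancing or retreating the pointers according to
   the direction flag without any branch. */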

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if (s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}

#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
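
/* Explanatory note: condition codes are evaluated lazily. Flag-setting
   operations only record their operands in cc_src/cc_dst plus the
   operation kind in cc_op; the two functions above materialize flags on
   demand via an indirect call through cc_table[cc_op], each entry of
   which holds a compute_all and a compute_c function pointer. The shift
   by 3 on 32-bit hosts versus 4 on 64-bit hosts reflects the 8- vs.
   16-byte size of a CCTable entry (two host pointers). */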

#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
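
/* Explanatory note: the label returned here is the loop-exit point used by
   the GEN_REPZ macros below. If ECX is already zero, control falls through
   to l2 and jumps to next_eip; otherwise one iteration of the string
   instruction is executed, ECX is decremented, and the block loops back by
   jumping to cur_eip, so each translated block performs exactly one
   iteration of a REP-prefixed instruction. */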

#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind: we generate jumps to current or next
   instruction */
#ifndef VBOX
#define GEN_REPZ(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                  \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                            \
    int l2;                                                                  \
    gen_update_cc_op(s);                                                     \
    l2 = gen_jz_ecx_string(s, next_eip);                                     \
    gen_ ## op(s, ot);                                                       \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                  \
    /* a loop would cause two single step exceptions if ECX = 1              \
       before rep string_insn */                                             \
    if (!s->jmp_opt)                                                         \
        gen_op_jz_ecx(s->aflag, l2);                                         \
    gen_jmp(s, cur_eip);                                                     \
}
#else /* VBOX */
#define GEN_REPZ(op)                                                         \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                    \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                            \
    int l2;                                                                  \
    gen_update_cc_op(s);                                                     \
    l2 = gen_jz_ecx_string(s, next_eip);                                     \
    gen_ ## op(s, ot);                                                       \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                  \
    /* a loop would cause two single step exceptions if ECX = 1              \
       before rep string_insn */                                             \
    if (!s->jmp_opt)                                                         \
        gen_op_jz_ecx(s->aflag, l2);                                         \
    gen_jmp(s, cur_eip);                                                     \
}
#endif /* VBOX */

#ifndef VBOX
#define GEN_REPZ2(op)                                                        \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                  \
                                   target_ulong cur_eip,                     \
                                   target_ulong next_eip,                    \
                                   int nz)                                   \
{                                                                            \
    int l2;                                                                  \
    gen_update_cc_op(s);                                                     \
    l2 = gen_jz_ecx_string(s, next_eip);                                     \
    gen_ ## op(s, ot);                                                       \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                  \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                       \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);               \
    if (!s->jmp_opt)                                                         \
        gen_op_jz_ecx(s->aflag, l2);                                         \
    gen_jmp(s, cur_eip);                                                     \
}
#else /* VBOX */
#define GEN_REPZ2(op)                                                        \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                    \
                                 target_ulong cur_eip,                       \
                                 target_ulong next_eip,                      \
                                 int nz)                                     \
{                                                                            \
    int l2;                                                                  \
    gen_update_cc_op(s);                                                     \
    l2 = gen_jz_ecx_string(s, next_eip);                                     \
    gen_ ## op(s, ot);                                                       \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                  \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                       \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);               \
    if (!s->jmp_opt)                                                         \
        gen_op_jz_ecx(s->aflag, l2);                                         \
    gen_jmp(s, cur_eip);                                                     \
}
#endif /* VBOX */

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
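
/* Explanatory note: GEN_REPZ2 handles the string instructions that also
   test ZF (SCAS/CMPS). The nz argument distinguishes REPNZ (nz = 1) from
   REPZ (nz = 0): the gen_jcc1() call exits the loop once ZF is clear for
   REPZ or once ZF is set for REPNZ, on top of the ECX-exhaustion test
   shared with GEN_REPZ. */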

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
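
/* Explanatory note: for ADC and SBB the resulting cc_op cannot be chosen
   at translation time because it depends on the incoming carry, so the
   code above computes cc_op at runtime as (CC_OP_ADDB + ot) + 4 * carry
   (resp. CC_OP_SUBB), relying on the ADC (resp. SBB) cc_op values sitting
   exactly four enum entries after the ADD (resp. SUB) ones; the translator
   then marks its own static knowledge as CC_OP_DYNAMIC. */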

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
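
/* Explanatory note: INC/DEC update all arithmetic flags except CF. The
   gen_compute_eflags_c(cpu_cc_src) call above therefore snapshots the
   current carry into cc_src before cc_op switches to the INC/DEC cc_op
   families, whose flag-computation helpers read the preserved CF back
   out of cc_src. */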

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
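
/* Explanatory note: x86 leaves the flags untouched when the masked shift
   count is zero, which for a register count is only known at runtime.
   Hence the value shifted by count-1 (the would-be carry) and the result
   are parked in local temps, and the cc_src/cc_dst/cc_op update is
   branched around when the count turns out to be zero; after that the
   translator can no longer know cc_op statically and falls back to
   CC_OP_DYNAMIC. */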

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

#ifndef VBOX
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#else /* VBOX */
DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#endif /* VBOX */
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
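
/* Explanatory note: rotates only affect CF and OF. The code above derives
   CF from the bit rotated into the carry position and OF from the top bit
   of (original XOR result), positioned at bit 11 via tcg_gen_lshift();
   both are folded into a full EFLAGS image committed with CC_OP_EFLAGS.
   As with the shifts, the whole flags update is skipped for a zero count. */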

static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
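
/* Explanatory note: RCL/RCR go through helpers because they rotate through
   the carry flag. The helpers presumably report back through cpu_cc_tmp:
   a value of -1 (tested above) indicates a zero count, i.e. the flags are
   left unchanged; any other value is the new EFLAGS image, committed via
   CC_OP_EFLAGS. */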

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate the code */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2209
2210static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2211{
2212 if (s != OR_TMP1)
2213 gen_op_mov_TN_reg(ot, 1, s);
2214 switch(op) {
2215 case OP_ROL:
2216 gen_rot_rm_T1(s1, ot, d, 0);
2217 break;
2218 case OP_ROR:
2219 gen_rot_rm_T1(s1, ot, d, 1);
2220 break;
2221 case OP_SHL:
2222 case OP_SHL1:
2223 gen_shift_rm_T1(s1, ot, d, 0, 0);
2224 break;
2225 case OP_SHR:
2226 gen_shift_rm_T1(s1, ot, d, 1, 0);
2227 break;
2228 case OP_SAR:
2229 gen_shift_rm_T1(s1, ot, d, 1, 1);
2230 break;
2231 case OP_RCL:
2232 gen_rotc_rm_T1(s1, ot, d, 0);
2233 break;
2234 case OP_RCR:
2235 gen_rotc_rm_T1(s1, ot, d, 1);
2236 break;
2237 }
2238}
2239
2240static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2241{
2242 switch(op) {
2243 case OP_SHL:
2244 case OP_SHL1:
2245 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2246 break;
2247 case OP_SHR:
2248 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2249 break;
2250 case OP_SAR:
2251 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2252 break;
2253 default:
2254 /* currently not optimized */
2255 gen_op_movl_T1_im(c);
2256 gen_shift(s1, op, ot, d, OR_TMP1);
2257 break;
2258 }
2259}
2260
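/* Decode a ModRM/SIB effective address into A0. With 32/64-bit
   addressing, base == 4 selects a SIB byte, mod picks no displacement,
   disp8 or disp32, and mod == 0 with (base & 7) == 5 means disp32 only
   (RIP-relative in 64-bit mode when no SIB byte is present). Worked
   example: 8b 44 9e 08 is mov eax,[esi+ebx*4+8]; modrm 0x44 gives
   mod=1, rm=4 (SIB follows), sib 0x9e gives scale=2 (x4), index=ebx,
   base=esi, and 0x08 is the disp8. The 16-bit path below instead uses
   the classic BX+SI/BX+DI/BP+SI/... register pair table. */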
2261static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2262{
2263 target_long disp;
2264 int havesib;
2265 int base;
2266 int index;
2267 int scale;
2268 int opreg;
2269 int mod, rm, code, override, must_add_seg;
2270
2271 override = s->override;
2272 must_add_seg = s->addseg;
2273 if (override >= 0)
2274 must_add_seg = 1;
2275 mod = (modrm >> 6) & 3;
2276 rm = modrm & 7;
2277
2278 if (s->aflag) {
2279
2280 havesib = 0;
2281 base = rm;
2282 index = 0;
2283 scale = 0;
2284
2285 if (base == 4) {
2286 havesib = 1;
2287 code = ldub_code(s->pc++);
2288 scale = (code >> 6) & 3;
2289 index = ((code >> 3) & 7) | REX_X(s);
2290 base = (code & 7);
2291 }
2292 base |= REX_B(s);
2293
2294 switch (mod) {
2295 case 0:
2296 if ((base & 7) == 5) {
2297 base = -1;
2298 disp = (int32_t)ldl_code(s->pc);
2299 s->pc += 4;
2300 if (CODE64(s) && !havesib) {
2301 disp += s->pc + s->rip_offset;
2302 }
2303 } else {
2304 disp = 0;
2305 }
2306 break;
2307 case 1:
2308 disp = (int8_t)ldub_code(s->pc++);
2309 break;
2310 default:
2311 case 2:
2312 disp = ldl_code(s->pc);
2313 s->pc += 4;
2314 break;
2315 }
2316
2317 if (base >= 0) {
2318 /* for correct popl handling with esp */
2319 if (base == 4 && s->popl_esp_hack)
2320 disp += s->popl_esp_hack;
2321#ifdef TARGET_X86_64
2322 if (s->aflag == 2) {
2323 gen_op_movq_A0_reg(base);
2324 if (disp != 0) {
2325 gen_op_addq_A0_im(disp);
2326 }
2327 } else
2328#endif
2329 {
2330 gen_op_movl_A0_reg(base);
2331 if (disp != 0)
2332 gen_op_addl_A0_im(disp);
2333 }
2334 } else {
2335#ifdef TARGET_X86_64
2336 if (s->aflag == 2) {
2337 gen_op_movq_A0_im(disp);
2338 } else
2339#endif
2340 {
2341 gen_op_movl_A0_im(disp);
2342 }
2343 }
2344 /* XXX: index == 4 is always invalid */
2345 if (havesib && (index != 4 || scale != 0)) {
2346#ifdef TARGET_X86_64
2347 if (s->aflag == 2) {
2348 gen_op_addq_A0_reg_sN(scale, index);
2349 } else
2350#endif
2351 {
2352 gen_op_addl_A0_reg_sN(scale, index);
2353 }
2354 }
2355 if (must_add_seg) {
2356 if (override < 0) {
2357 if (base == R_EBP || base == R_ESP)
2358 override = R_SS;
2359 else
2360 override = R_DS;
2361 }
2362#ifdef TARGET_X86_64
2363 if (s->aflag == 2) {
2364 gen_op_addq_A0_seg(override);
2365 } else
2366#endif
2367 {
2368 gen_op_addl_A0_seg(override);
2369 }
2370 }
2371 } else {
2372 switch (mod) {
2373 case 0:
2374 if (rm == 6) {
2375 disp = lduw_code(s->pc);
2376 s->pc += 2;
2377 gen_op_movl_A0_im(disp);
2378 rm = 0; /* avoid SS override */
2379 goto no_rm;
2380 } else {
2381 disp = 0;
2382 }
2383 break;
2384 case 1:
2385 disp = (int8_t)ldub_code(s->pc++);
2386 break;
2387 default:
2388 case 2:
2389 disp = lduw_code(s->pc);
2390 s->pc += 2;
2391 break;
2392 }
2393 switch(rm) {
2394 case 0:
2395 gen_op_movl_A0_reg(R_EBX);
2396 gen_op_addl_A0_reg_sN(0, R_ESI);
2397 break;
2398 case 1:
2399 gen_op_movl_A0_reg(R_EBX);
2400 gen_op_addl_A0_reg_sN(0, R_EDI);
2401 break;
2402 case 2:
2403 gen_op_movl_A0_reg(R_EBP);
2404 gen_op_addl_A0_reg_sN(0, R_ESI);
2405 break;
2406 case 3:
2407 gen_op_movl_A0_reg(R_EBP);
2408 gen_op_addl_A0_reg_sN(0, R_EDI);
2409 break;
2410 case 4:
2411 gen_op_movl_A0_reg(R_ESI);
2412 break;
2413 case 5:
2414 gen_op_movl_A0_reg(R_EDI);
2415 break;
2416 case 6:
2417 gen_op_movl_A0_reg(R_EBP);
2418 break;
2419 default:
2420 case 7:
2421 gen_op_movl_A0_reg(R_EBX);
2422 break;
2423 }
2424 if (disp != 0)
2425 gen_op_addl_A0_im(disp);
2426 gen_op_andl_A0_ffff();
2427 no_rm:
2428 if (must_add_seg) {
2429 if (override < 0) {
2430 if (rm == 2 || rm == 3 || rm == 6)
2431 override = R_SS;
2432 else
2433 override = R_DS;
2434 }
2435 gen_op_addl_A0_seg(override);
2436 }
2437 }
2438
2439 opreg = OR_A0;
2440 disp = 0;
2441 *reg_ptr = opreg;
2442 *offset_ptr = disp;
2443}
2444
2445static void gen_nop_modrm(DisasContext *s, int modrm)
2446{
2447 int mod, rm, base, code;
2448
2449 mod = (modrm >> 6) & 3;
2450 if (mod == 3)
2451 return;
2452 rm = modrm & 7;
2453
2454 if (s->aflag) {
2455
2456 base = rm;
2457
2458 if (base == 4) {
2459 code = ldub_code(s->pc++);
2460 base = (code & 7);
2461 }
2462
2463 switch (mod) {
2464 case 0:
2465 if (base == 5) {
2466 s->pc += 4;
2467 }
2468 break;
2469 case 1:
2470 s->pc++;
2471 break;
2472 default:
2473 case 2:
2474 s->pc += 4;
2475 break;
2476 }
2477 } else {
2478 switch (mod) {
2479 case 0:
2480 if (rm == 6) {
2481 s->pc += 2;
2482 }
2483 break;
2484 case 1:
2485 s->pc++;
2486 break;
2487 default:
2488 case 2:
2489 s->pc += 2;
2490 break;
2491 }
2492 }
2493}
2494
2495/* used for LEA and MOV AX, mem */
2496static void gen_add_A0_ds_seg(DisasContext *s)
2497{
2498 int override, must_add_seg;
2499 must_add_seg = s->addseg;
2500 override = R_DS;
2501 if (s->override >= 0) {
2502 override = s->override;
2503 must_add_seg = 1;
2504 } else {
2505 override = R_DS;
2506 }
2507 if (must_add_seg) {
2508#ifdef TARGET_X86_64
2509 if (CODE64(s)) {
2510 gen_op_addq_A0_seg(override);
2511 } else
2512#endif
2513 {
2514 gen_op_addl_A0_seg(override);
2515 }
2516 }
2517}
2518
2519/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2520 OR_TMP0 */
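/* mod == 3 encodes a register operand; anything else goes through
   gen_lea_modrm and a memory access via A0 */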
2521static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2522{
2523 int mod, rm, opreg, disp;
2524
2525 mod = (modrm >> 6) & 3;
2526 rm = (modrm & 7) | REX_B(s);
2527 if (mod == 3) {
2528 if (is_store) {
2529 if (reg != OR_TMP0)
2530 gen_op_mov_TN_reg(ot, 0, reg);
2531 gen_op_mov_reg_T0(ot, rm);
2532 } else {
2533 gen_op_mov_TN_reg(ot, 0, rm);
2534 if (reg != OR_TMP0)
2535 gen_op_mov_reg_T0(ot, reg);
2536 }
2537 } else {
2538 gen_lea_modrm(s, modrm, &opreg, &disp);
2539 if (is_store) {
2540 if (reg != OR_TMP0)
2541 gen_op_mov_TN_reg(ot, 0, reg);
2542 gen_op_st_T0_A0(ot + s->mem_index);
2543 } else {
2544 gen_op_ld_T0_A0(ot + s->mem_index);
2545 if (reg != OR_TMP0)
2546 gen_op_mov_reg_T0(ot, reg);
2547 }
2548 }
2549}
2550
2551#ifndef VBOX
2552static inline uint32_t insn_get(DisasContext *s, int ot)
2553#else /* VBOX */
2554DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2555#endif /* VBOX */
2556{
2557 uint32_t ret;
2558
2559 switch(ot) {
2560 case OT_BYTE:
2561 ret = ldub_code(s->pc);
2562 s->pc++;
2563 break;
2564 case OT_WORD:
2565 ret = lduw_code(s->pc);
2566 s->pc += 2;
2567 break;
2568 default:
2569 case OT_LONG:
2570 ret = ldl_code(s->pc);
2571 s->pc += 4;
2572 break;
2573 }
2574 return ret;
2575}
2576
2577#ifndef VBOX
2578static inline int insn_const_size(unsigned int ot)
2579#else /* VBOX */
2580DECLINLINE(int) insn_const_size(unsigned int ot)
2581#endif /* VBOX */
2582{
2583 if (ot <= OT_LONG)
2584 return 1 << ot;
2585 else
2586 return 4;
2587}
2588
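/* If the target lies in one of the (up to two) pages covered by the
   current TB, emit a direct jump with tcg_gen_goto_tb so the blocks
   can be chained later; otherwise the jump has to leave the TB via
   gen_eob and the target is looked up again at run time. */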
2589#ifndef VBOX
2590static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2591#else /* VBOX */
2592DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2593#endif /* VBOX */
2594{
2595 TranslationBlock *tb;
2596 target_ulong pc;
2597
2598 pc = s->cs_base + eip;
2599 tb = s->tb;
2600 /* NOTE: we handle the case where the TB spans two pages here */
2601 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2602 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2603 /* jump to same page: we can use a direct jump */
2604 tcg_gen_goto_tb(tb_num);
2605 gen_jmp_im(eip);
2606 tcg_gen_exit_tb((long)tb + tb_num);
2607 } else {
2608 /* jump to another page: currently not optimized */
2609 gen_jmp_im(eip);
2610 gen_eob(s);
2611 }
2612}
2613
2614#ifndef VBOX
2615static inline void gen_jcc(DisasContext *s, int b,
2616#else /* VBOX */
2617DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2618#endif /* VBOX */
2619 target_ulong val, target_ulong next_eip)
2620{
2621 int l1, l2, cc_op;
2622
2623 cc_op = s->cc_op;
2624 if (s->cc_op != CC_OP_DYNAMIC) {
2625 gen_op_set_cc_op(s->cc_op);
2626 s->cc_op = CC_OP_DYNAMIC;
2627 }
2628 if (s->jmp_opt) {
2629#ifdef VBOX
2630 gen_check_external_event(s);
2631#endif /* VBOX */
2632 l1 = gen_new_label();
2633 gen_jcc1(s, cc_op, b, l1);
2634
2635 gen_goto_tb(s, 0, next_eip);
2636
2637 gen_set_label(l1);
2638 gen_goto_tb(s, 1, val);
2639 s->is_jmp = 3;
2640 } else {
2641
2642 l1 = gen_new_label();
2643 l2 = gen_new_label();
2644 gen_jcc1(s, cc_op, b, l1);
2645
2646 gen_jmp_im(next_eip);
2647 tcg_gen_br(l2);
2648
2649 gen_set_label(l1);
2650 gen_jmp_im(val);
2651 gen_set_label(l2);
2652 gen_eob(s);
2653 }
2654}
2655
2656static void gen_setcc(DisasContext *s, int b)
2657{
2658 int inv, jcc_op, l1;
2659 TCGv t0;
2660
2661 if (is_fast_jcc_case(s, b)) {
2662 /* nominal case: we use a jump */
2663 /* XXX: make it faster by adding new instructions in TCG */
2664 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2665 tcg_gen_movi_tl(t0, 0);
2666 l1 = gen_new_label();
2667 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2668 tcg_gen_movi_tl(t0, 1);
2669 gen_set_label(l1);
2670 tcg_gen_mov_tl(cpu_T[0], t0);
2671 tcg_temp_free(t0);
2672 } else {
2673        /* slow case: it is more efficient not to generate a jump,
2674           although it is questionable whether this optimization is
2675           worth it */
2676 inv = b & 1;
2677 jcc_op = (b >> 1) & 7;
2678 gen_setcc_slow_T0(s, jcc_op);
2679 if (inv) {
2680 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2681 }
2682 }
2683}
2684
2685#ifndef VBOX
2686static inline void gen_op_movl_T0_seg(int seg_reg)
2687#else /* VBOX */
2688DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2689#endif /* VBOX */
2690{
2691 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2692 offsetof(CPUX86State,segs[seg_reg].selector));
2693}
2694
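/* real mode/vm86 segment load: base = selector << 4, with no
   descriptor table lookup and no protection checks */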
2695#ifndef VBOX
2696static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2697#else /* VBOX */
2698DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2699#endif /* VBOX */
2700{
2701 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2702 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2703 offsetof(CPUX86State,segs[seg_reg].selector));
2704 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2705 tcg_gen_st_tl(cpu_T[0], cpu_env,
2706 offsetof(CPUX86State,segs[seg_reg].base));
2707#ifdef VBOX
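    /* also refresh the cached attribute bits so VBox code inspecting
       segs[].flags sees a consistent (present, writable) real-mode
       descriptor */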
2708 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2709 if (seg_reg == R_CS)
2710 flags |= DESC_CS_MASK;
2711 gen_op_movl_T0_im(flags);
2712 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2713#endif
2714}
2715
2716/* move T0 to seg_reg and compute if the CPU state may change. Never
2717 call this function with seg_reg == R_CS */
2718static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2719{
2720 if (s->pe && !s->vm86) {
2721 /* XXX: optimize by finding processor state dynamically */
2722 if (s->cc_op != CC_OP_DYNAMIC)
2723 gen_op_set_cc_op(s->cc_op);
2724 gen_jmp_im(cur_eip);
2725 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2726 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2727 /* abort translation because the addseg value may change or
2728 because ss32 may change. For R_SS, translation must always
2729 stop as a special handling must be done to disable hardware
2730 interrupts for the next instruction */
2731 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2732 s->is_jmp = 3;
2733 } else {
2734 gen_op_movl_seg_T0_vm(seg_reg);
2735 if (seg_reg == R_SS)
2736 s->is_jmp = 3;
2737 }
2738}
2739
2740#ifndef VBOX
2741static inline int svm_is_rep(int prefixes)
2742#else /* VBOX */
2743DECLINLINE(int) svm_is_rep(int prefixes)
2744#endif /* VBOX */
2745{
2746 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2747}
2748
2749#ifndef VBOX
2750static inline void
2751#else /* VBOX */
2752DECLINLINE(void)
2753#endif /* VBOX */
2754gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2755 uint32_t type, uint64_t param)
2756{
2757 /* no SVM activated; fast case */
2758 if (likely(!(s->flags & HF_SVMI_MASK)))
2759 return;
2760 if (s->cc_op != CC_OP_DYNAMIC)
2761 gen_op_set_cc_op(s->cc_op);
2762 gen_jmp_im(pc_start - s->cs_base);
2763 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2764 tcg_const_i32(type), tcg_const_i64(param));
2765}
2766
2767#ifndef VBOX
2768static inline void
2769#else /* VBOX */
2770DECLINLINE(void)
2771#endif
2772gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2773{
2774 gen_svm_check_intercept_param(s, pc_start, type, 0);
2775}
2776
2777#ifndef VBOX
2778static inline void gen_stack_update(DisasContext *s, int addend)
2779#else /* VBOX */
2780DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2781#endif /* VBOX */
2782{
2783#ifdef TARGET_X86_64
2784 if (CODE64(s)) {
2785 gen_op_add_reg_im(2, R_ESP, addend);
2786 } else
2787#endif
2788 if (s->ss32) {
2789 gen_op_add_reg_im(1, R_ESP, addend);
2790 } else {
2791 gen_op_add_reg_im(0, R_ESP, addend);
2792 }
2793}
2794
2795/* generate a push. It depends on ss32, addseg and dflag */
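/* When addseg is set, A0 gets the SS base added for the store, so the
   raw (unsegmented) stack offset is kept in T1 for writing back to
   ESP. */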
2796static void gen_push_T0(DisasContext *s)
2797{
2798#ifdef TARGET_X86_64
2799 if (CODE64(s)) {
2800 gen_op_movq_A0_reg(R_ESP);
2801 if (s->dflag) {
2802 gen_op_addq_A0_im(-8);
2803 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2804 } else {
2805 gen_op_addq_A0_im(-2);
2806 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2807 }
2808 gen_op_mov_reg_A0(2, R_ESP);
2809 } else
2810#endif
2811 {
2812 gen_op_movl_A0_reg(R_ESP);
2813 if (!s->dflag)
2814 gen_op_addl_A0_im(-2);
2815 else
2816 gen_op_addl_A0_im(-4);
2817 if (s->ss32) {
2818 if (s->addseg) {
2819 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2820 gen_op_addl_A0_seg(R_SS);
2821 }
2822 } else {
2823 gen_op_andl_A0_ffff();
2824 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2825 gen_op_addl_A0_seg(R_SS);
2826 }
2827 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2828 if (s->ss32 && !s->addseg)
2829 gen_op_mov_reg_A0(1, R_ESP);
2830 else
2831 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2832 }
2833}
2834
2835/* generate a push. It depends on ss32, addseg and dflag */
2836/* slower version for T1, only used for call Ev */
2837static void gen_push_T1(DisasContext *s)
2838{
2839#ifdef TARGET_X86_64
2840 if (CODE64(s)) {
2841 gen_op_movq_A0_reg(R_ESP);
2842 if (s->dflag) {
2843 gen_op_addq_A0_im(-8);
2844 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2845 } else {
2846 gen_op_addq_A0_im(-2);
2847            gen_op_st_T1_A0(OT_WORD + s->mem_index);
2848 }
2849 gen_op_mov_reg_A0(2, R_ESP);
2850 } else
2851#endif
2852 {
2853 gen_op_movl_A0_reg(R_ESP);
2854 if (!s->dflag)
2855 gen_op_addl_A0_im(-2);
2856 else
2857 gen_op_addl_A0_im(-4);
2858 if (s->ss32) {
2859 if (s->addseg) {
2860 gen_op_addl_A0_seg(R_SS);
2861 }
2862 } else {
2863 gen_op_andl_A0_ffff();
2864 gen_op_addl_A0_seg(R_SS);
2865 }
2866 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2867
2868 if (s->ss32 && !s->addseg)
2869 gen_op_mov_reg_A0(1, R_ESP);
2870 else
2871 gen_stack_update(s, (-2) << s->dflag);
2872 }
2873}
2874
2875/* two step pop is necessary for precise exceptions */
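/* the memory read is done first and ESP is only adjusted afterwards in
   gen_pop_update, so a faulting load leaves ESP unmodified */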
2876static void gen_pop_T0(DisasContext *s)
2877{
2878#ifdef TARGET_X86_64
2879 if (CODE64(s)) {
2880 gen_op_movq_A0_reg(R_ESP);
2881 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2882 } else
2883#endif
2884 {
2885 gen_op_movl_A0_reg(R_ESP);
2886 if (s->ss32) {
2887 if (s->addseg)
2888 gen_op_addl_A0_seg(R_SS);
2889 } else {
2890 gen_op_andl_A0_ffff();
2891 gen_op_addl_A0_seg(R_SS);
2892 }
2893 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2894 }
2895}
2896
2897static void gen_pop_update(DisasContext *s)
2898{
2899#ifdef TARGET_X86_64
2900 if (CODE64(s) && s->dflag) {
2901 gen_stack_update(s, 8);
2902 } else
2903#endif
2904 {
2905 gen_stack_update(s, 2 << s->dflag);
2906 }
2907}
2908
2909static void gen_stack_A0(DisasContext *s)
2910{
2911 gen_op_movl_A0_reg(R_ESP);
2912 if (!s->ss32)
2913 gen_op_andl_A0_ffff();
2914 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2915 if (s->addseg)
2916 gen_op_addl_A0_seg(R_SS);
2917}
2918
2919/* NOTE: wrap around in 16 bit not fully handled */
2920static void gen_pusha(DisasContext *s)
2921{
2922 int i;
2923 gen_op_movl_A0_reg(R_ESP);
2924 gen_op_addl_A0_im(-16 << s->dflag);
2925 if (!s->ss32)
2926 gen_op_andl_A0_ffff();
2927 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2928 if (s->addseg)
2929 gen_op_addl_A0_seg(R_SS);
2930 for(i = 0;i < 8; i++) {
2931 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2932 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2933 gen_op_addl_A0_im(2 << s->dflag);
2934 }
2935 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2936}
2937
2938/* NOTE: wrap around in 16 bit not fully handled */
2939static void gen_popa(DisasContext *s)
2940{
2941 int i;
2942 gen_op_movl_A0_reg(R_ESP);
2943 if (!s->ss32)
2944 gen_op_andl_A0_ffff();
2945 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2946 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2947 if (s->addseg)
2948 gen_op_addl_A0_seg(R_SS);
2949 for(i = 0;i < 8; i++) {
2950 /* ESP is not reloaded */
2951 if (i != 3) {
2952 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2953 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2954 }
2955 gen_op_addl_A0_im(2 << s->dflag);
2956 }
2957 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2958}
2959
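/* ENTER: push (E)BP, let a helper copy the 'level' outer frame
   pointers for nested procedures, load (E)BP with the new frame
   pointer and reserve esp_addend bytes of locals on the stack. */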
2960static void gen_enter(DisasContext *s, int esp_addend, int level)
2961{
2962 int ot, opsize;
2963
2964 level &= 0x1f;
2965#ifdef TARGET_X86_64
2966 if (CODE64(s)) {
2967 ot = s->dflag ? OT_QUAD : OT_WORD;
2968 opsize = 1 << ot;
2969
2970 gen_op_movl_A0_reg(R_ESP);
2971 gen_op_addq_A0_im(-opsize);
2972 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2973
2974 /* push bp */
2975 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2976 gen_op_st_T0_A0(ot + s->mem_index);
2977 if (level) {
2978 /* XXX: must save state */
2979 tcg_gen_helper_0_3(helper_enter64_level,
2980 tcg_const_i32(level),
2981 tcg_const_i32((ot == OT_QUAD)),
2982 cpu_T[1]);
2983 }
2984 gen_op_mov_reg_T1(ot, R_EBP);
2985 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2986 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2987 } else
2988#endif
2989 {
2990 ot = s->dflag + OT_WORD;
2991 opsize = 2 << s->dflag;
2992
2993 gen_op_movl_A0_reg(R_ESP);
2994 gen_op_addl_A0_im(-opsize);
2995 if (!s->ss32)
2996 gen_op_andl_A0_ffff();
2997 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2998 if (s->addseg)
2999 gen_op_addl_A0_seg(R_SS);
3000 /* push bp */
3001 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3002 gen_op_st_T0_A0(ot + s->mem_index);
3003 if (level) {
3004 /* XXX: must save state */
3005 tcg_gen_helper_0_3(helper_enter_level,
3006 tcg_const_i32(level),
3007 tcg_const_i32(s->dflag),
3008 cpu_T[1]);
3009 }
3010 gen_op_mov_reg_T1(ot, R_EBP);
3011 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3012 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3013 }
3014}
3015
3016static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3017{
3018 if (s->cc_op != CC_OP_DYNAMIC)
3019 gen_op_set_cc_op(s->cc_op);
3020 gen_jmp_im(cur_eip);
3021 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3022 s->is_jmp = 3;
3023}
3024
3025/* an interrupt is different from an exception because of the
3026 privilege checks */
3027static void gen_interrupt(DisasContext *s, int intno,
3028 target_ulong cur_eip, target_ulong next_eip)
3029{
3030 if (s->cc_op != CC_OP_DYNAMIC)
3031 gen_op_set_cc_op(s->cc_op);
3032 gen_jmp_im(cur_eip);
3033 tcg_gen_helper_0_2(helper_raise_interrupt,
3034 tcg_const_i32(intno),
3035 tcg_const_i32(next_eip - cur_eip));
3036 s->is_jmp = 3;
3037}
3038
3039static void gen_debug(DisasContext *s, target_ulong cur_eip)
3040{
3041 if (s->cc_op != CC_OP_DYNAMIC)
3042 gen_op_set_cc_op(s->cc_op);
3043 gen_jmp_im(cur_eip);
3044 tcg_gen_helper_0_0(helper_debug);
3045 s->is_jmp = 3;
3046}
3047
3048/* generate a generic end of block. Trace exception is also generated
3049 if needed */
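/* HF_INHIBIT_IRQ is the one-instruction interrupt shadow created by
   MOV SS/POP SS/STI; it must be cleared when the block ends. */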
3050static void gen_eob(DisasContext *s)
3051{
3052 if (s->cc_op != CC_OP_DYNAMIC)
3053 gen_op_set_cc_op(s->cc_op);
3054 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3055 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3056 }
3057 if (s->singlestep_enabled) {
3058 tcg_gen_helper_0_0(helper_debug);
3059 } else if (s->tf) {
3060 tcg_gen_helper_0_0(helper_single_step);
3061 } else {
3062 tcg_gen_exit_tb(0);
3063 }
3064 s->is_jmp = 3;
3065}
3066
3067/* generate a jump to eip. No segment change must happen before as a
3068 direct call to the next block may occur */
3069static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3070{
3071 if (s->jmp_opt) {
3072#ifdef VBOX
3073 gen_check_external_event(s);
3074#endif /* VBOX */
3075 if (s->cc_op != CC_OP_DYNAMIC) {
3076 gen_op_set_cc_op(s->cc_op);
3077 s->cc_op = CC_OP_DYNAMIC;
3078 }
3079 gen_goto_tb(s, tb_num, eip);
3080 s->is_jmp = 3;
3081 } else {
3082 gen_jmp_im(eip);
3083 gen_eob(s);
3084 }
3085}
3086
3087static void gen_jmp(DisasContext *s, target_ulong eip)
3088{
3089 gen_jmp_tb(s, eip, 0);
3090}
3091
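/* Helpers for 64/128-bit SSE memory accesses: 128-bit quantities are
   split into two 64-bit accesses, and (idx >> 2) - 1 recovers the
   softmmu memory index that the caller packed into idx
   (s->mem_index). */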
3092#ifndef VBOX
3093static inline void gen_ldq_env_A0(int idx, int offset)
3094#else /* VBOX */
3095DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3096#endif /* VBOX */
3097{
3098 int mem_index = (idx >> 2) - 1;
3099 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3100 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3101}
3102
3103#ifndef VBOX
3104static inline void gen_stq_env_A0(int idx, int offset)
3105#else /* VBOX */
3106DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3107#endif /* VBOX */
3108{
3109 int mem_index = (idx >> 2) - 1;
3110 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3111 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3112}
3113
3114#ifndef VBOX
3115static inline void gen_ldo_env_A0(int idx, int offset)
3116#else /* VBOX */
3117DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3118#endif /* VBOX */
3119{
3120 int mem_index = (idx >> 2) - 1;
3121 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3122 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3123 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3124 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3125 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3126}
3127
3128#ifndef VBOX
3129static inline void gen_sto_env_A0(int idx, int offset)
3130#else /* VBOX */
3131DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3132#endif /* VBOX */
3133{
3134 int mem_index = (idx >> 2) - 1;
3135 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3136 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3137 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3138 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3139 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3140}
3141
3142#ifndef VBOX
3143static inline void gen_op_movo(int d_offset, int s_offset)
3144#else /* VBOX */
3145DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3146#endif /* VBOX */
3147{
3148 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3149 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3150 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3151 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3152}
3153
3154#ifndef VBOX
3155static inline void gen_op_movq(int d_offset, int s_offset)
3156#else /* VBOX */
3157DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3158#endif /* VBOX */
3159{
3160 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3161 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3162}
3163
3164#ifndef VBOX
3165static inline void gen_op_movl(int d_offset, int s_offset)
3166#else /* VBOX */
3167DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3168#endif /* VBOX */
3169{
3170 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3171 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3172}
3173
3174#ifndef VBOX
3175static inline void gen_op_movq_env_0(int d_offset)
3176#else /* VBOX */
3177DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3178#endif /* VBOX */
3179{
3180 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3181 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3182}
3183
3184#define SSE_SPECIAL ((void *)1)
3185#define SSE_DUMMY ((void *)2)
3186
3187#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3188#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3189 helper_ ## x ## ss, helper_ ## x ## sd, }
3190
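/* Main MMX/SSE dispatch table, indexed by [opcode][b1] where b1
   encodes the mandatory prefix: 0 = none, 1 = 0x66, 2 = 0xf3,
   3 = 0xf2 (see gen_sse). SSE_SPECIAL entries are decoded by hand. */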
3191static void *sse_op_table1[256][4] = {
3192 /* 3DNow! extensions */
3193 [0x0e] = { SSE_DUMMY }, /* femms */
3194 [0x0f] = { SSE_DUMMY }, /* pf... */
3195 /* pure SSE operations */
3196 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3197 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3198 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3199 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3200 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3201 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3202 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3203 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3204
3205 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3206 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3207 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3208 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3209    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
3210    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
3211 [0x2e] = { helper_ucomiss, helper_ucomisd },
3212 [0x2f] = { helper_comiss, helper_comisd },
3213 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3214 [0x51] = SSE_FOP(sqrt),
3215 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3216 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3217 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3218 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3219 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3220 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3221 [0x58] = SSE_FOP(add),
3222 [0x59] = SSE_FOP(mul),
3223 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3224 helper_cvtss2sd, helper_cvtsd2ss },
3225 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3226 [0x5c] = SSE_FOP(sub),
3227 [0x5d] = SSE_FOP(min),
3228 [0x5e] = SSE_FOP(div),
3229 [0x5f] = SSE_FOP(max),
3230
3231 [0xc2] = SSE_FOP(cmpeq),
3232 [0xc6] = { helper_shufps, helper_shufpd },
3233
3234 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3235 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3236
3237 /* MMX ops and their SSE extensions */
3238 [0x60] = MMX_OP2(punpcklbw),
3239 [0x61] = MMX_OP2(punpcklwd),
3240 [0x62] = MMX_OP2(punpckldq),
3241 [0x63] = MMX_OP2(packsswb),
3242 [0x64] = MMX_OP2(pcmpgtb),
3243 [0x65] = MMX_OP2(pcmpgtw),
3244 [0x66] = MMX_OP2(pcmpgtl),
3245 [0x67] = MMX_OP2(packuswb),
3246 [0x68] = MMX_OP2(punpckhbw),
3247 [0x69] = MMX_OP2(punpckhwd),
3248 [0x6a] = MMX_OP2(punpckhdq),
3249 [0x6b] = MMX_OP2(packssdw),
3250 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3251 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3252 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3253    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3254 [0x70] = { helper_pshufw_mmx,
3255 helper_pshufd_xmm,
3256 helper_pshufhw_xmm,
3257 helper_pshuflw_xmm },
3258 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3259 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3260 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3261 [0x74] = MMX_OP2(pcmpeqb),
3262 [0x75] = MMX_OP2(pcmpeqw),
3263 [0x76] = MMX_OP2(pcmpeql),
3264 [0x77] = { SSE_DUMMY }, /* emms */
3265 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3266 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3267    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3268 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3269 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3270 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3271 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3272 [0xd1] = MMX_OP2(psrlw),
3273 [0xd2] = MMX_OP2(psrld),
3274 [0xd3] = MMX_OP2(psrlq),
3275 [0xd4] = MMX_OP2(paddq),
3276 [0xd5] = MMX_OP2(pmullw),
3277 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3278 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3279 [0xd8] = MMX_OP2(psubusb),
3280 [0xd9] = MMX_OP2(psubusw),
3281 [0xda] = MMX_OP2(pminub),
3282 [0xdb] = MMX_OP2(pand),
3283 [0xdc] = MMX_OP2(paddusb),
3284 [0xdd] = MMX_OP2(paddusw),
3285 [0xde] = MMX_OP2(pmaxub),
3286 [0xdf] = MMX_OP2(pandn),
3287 [0xe0] = MMX_OP2(pavgb),
3288 [0xe1] = MMX_OP2(psraw),
3289 [0xe2] = MMX_OP2(psrad),
3290 [0xe3] = MMX_OP2(pavgw),
3291 [0xe4] = MMX_OP2(pmulhuw),
3292 [0xe5] = MMX_OP2(pmulhw),
3293 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3294    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3295 [0xe8] = MMX_OP2(psubsb),
3296 [0xe9] = MMX_OP2(psubsw),
3297 [0xea] = MMX_OP2(pminsw),
3298 [0xeb] = MMX_OP2(por),
3299 [0xec] = MMX_OP2(paddsb),
3300 [0xed] = MMX_OP2(paddsw),
3301 [0xee] = MMX_OP2(pmaxsw),
3302 [0xef] = MMX_OP2(pxor),
3303 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3304 [0xf1] = MMX_OP2(psllw),
3305 [0xf2] = MMX_OP2(pslld),
3306 [0xf3] = MMX_OP2(psllq),
3307 [0xf4] = MMX_OP2(pmuludq),
3308 [0xf5] = MMX_OP2(pmaddwd),
3309 [0xf6] = MMX_OP2(psadbw),
3310 [0xf7] = MMX_OP2(maskmov),
3311 [0xf8] = MMX_OP2(psubb),
3312 [0xf9] = MMX_OP2(psubw),
3313 [0xfa] = MMX_OP2(psubl),
3314 [0xfb] = MMX_OP2(psubq),
3315 [0xfc] = MMX_OP2(paddb),
3316 [0xfd] = MMX_OP2(paddw),
3317 [0xfe] = MMX_OP2(paddl),
3318};
3319
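/* shift-by-immediate group (0f 71/72/73): the row is
   ((b - 1) & 3) * 8 plus the modrm reg field, the column selects the
   MMX or SSE variant */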
3320static void *sse_op_table2[3 * 8][2] = {
3321 [0 + 2] = MMX_OP2(psrlw),
3322 [0 + 4] = MMX_OP2(psraw),
3323 [0 + 6] = MMX_OP2(psllw),
3324 [8 + 2] = MMX_OP2(psrld),
3325 [8 + 4] = MMX_OP2(psrad),
3326 [8 + 6] = MMX_OP2(pslld),
3327 [16 + 2] = MMX_OP2(psrlq),
3328 [16 + 3] = { NULL, helper_psrldq_xmm },
3329 [16 + 6] = MMX_OP2(psllq),
3330 [16 + 7] = { NULL, helper_pslldq_xmm },
3331};
3332
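/* int <-> scalar float conversions in three groups of four entries
   (cvtsi2ss/sd with their 64-bit forms, then the truncating and the
   rounding float-to-int variants); gen_sse selects the entry from the
   prefix, the opcode and REX.W. */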
3333static void *sse_op_table3[4 * 3] = {
3334 helper_cvtsi2ss,
3335 helper_cvtsi2sd,
3336 X86_64_ONLY(helper_cvtsq2ss),
3337 X86_64_ONLY(helper_cvtsq2sd),
3338
3339 helper_cvttss2si,
3340 helper_cvttsd2si,
3341 X86_64_ONLY(helper_cvttss2sq),
3342 X86_64_ONLY(helper_cvttsd2sq),
3343
3344 helper_cvtss2si,
3345 helper_cvtsd2si,
3346 X86_64_ONLY(helper_cvtss2sq),
3347 X86_64_ONLY(helper_cvtsd2sq),
3348};
3349
3350static void *sse_op_table4[8][4] = {
3351 SSE_FOP(cmpeq),
3352 SSE_FOP(cmplt),
3353 SSE_FOP(cmple),
3354 SSE_FOP(cmpunord),
3355 SSE_FOP(cmpneq),
3356 SSE_FOP(cmpnlt),
3357 SSE_FOP(cmpnle),
3358 SSE_FOP(cmpord),
3359};
3360
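/* 3DNow! dispatch: the opcodes are 0f 0f /r imm8 and the trailing
   imm8 selects the operation, so the table is indexed by that byte */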
3361static void *sse_op_table5[256] = {
3362 [0x0c] = helper_pi2fw,
3363 [0x0d] = helper_pi2fd,
3364 [0x1c] = helper_pf2iw,
3365 [0x1d] = helper_pf2id,
3366 [0x8a] = helper_pfnacc,
3367 [0x8e] = helper_pfpnacc,
3368 [0x90] = helper_pfcmpge,
3369 [0x94] = helper_pfmin,
3370 [0x96] = helper_pfrcp,
3371 [0x97] = helper_pfrsqrt,
3372 [0x9a] = helper_pfsub,
3373 [0x9e] = helper_pfadd,
3374 [0xa0] = helper_pfcmpgt,
3375 [0xa4] = helper_pfmax,
3376 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3377 [0xa7] = helper_movq, /* pfrsqit1 */
3378 [0xaa] = helper_pfsubr,
3379 [0xae] = helper_pfacc,
3380 [0xb0] = helper_pfcmpeq,
3381 [0xb4] = helper_pfmul,
3382 [0xb6] = helper_movq, /* pfrcpit2 */
3383 [0xb7] = helper_pmulhrw_mmx,
3384 [0xbb] = helper_pswapd,
3385 [0xbf] = helper_pavgb_mmx /* pavgusb */
3386};
3387
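/* three-byte opcode maps 0f 38 (sse_op_table6) and 0f 3a
   (sse_op_table7); ext_mask gates each entry on the matching CPUID
   extension feature bit */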
3388struct sse_op_helper_s {
3389 void *op[2]; uint32_t ext_mask;
3390};
3391#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3392#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3393#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3394#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3395static struct sse_op_helper_s sse_op_table6[256] = {
3396 [0x00] = SSSE3_OP(pshufb),
3397 [0x01] = SSSE3_OP(phaddw),
3398 [0x02] = SSSE3_OP(phaddd),
3399 [0x03] = SSSE3_OP(phaddsw),
3400 [0x04] = SSSE3_OP(pmaddubsw),
3401 [0x05] = SSSE3_OP(phsubw),
3402 [0x06] = SSSE3_OP(phsubd),
3403 [0x07] = SSSE3_OP(phsubsw),
3404 [0x08] = SSSE3_OP(psignb),
3405 [0x09] = SSSE3_OP(psignw),
3406 [0x0a] = SSSE3_OP(psignd),
3407 [0x0b] = SSSE3_OP(pmulhrsw),
3408 [0x10] = SSE41_OP(pblendvb),
3409 [0x14] = SSE41_OP(blendvps),
3410 [0x15] = SSE41_OP(blendvpd),
3411 [0x17] = SSE41_OP(ptest),
3412 [0x1c] = SSSE3_OP(pabsb),
3413 [0x1d] = SSSE3_OP(pabsw),
3414 [0x1e] = SSSE3_OP(pabsd),
3415 [0x20] = SSE41_OP(pmovsxbw),
3416 [0x21] = SSE41_OP(pmovsxbd),
3417 [0x22] = SSE41_OP(pmovsxbq),
3418 [0x23] = SSE41_OP(pmovsxwd),
3419 [0x24] = SSE41_OP(pmovsxwq),
3420 [0x25] = SSE41_OP(pmovsxdq),
3421 [0x28] = SSE41_OP(pmuldq),
3422 [0x29] = SSE41_OP(pcmpeqq),
3423    [0x2a] = SSE41_SPECIAL, /* movntdqa */
3424 [0x2b] = SSE41_OP(packusdw),
3425 [0x30] = SSE41_OP(pmovzxbw),
3426 [0x31] = SSE41_OP(pmovzxbd),
3427 [0x32] = SSE41_OP(pmovzxbq),
3428 [0x33] = SSE41_OP(pmovzxwd),
3429 [0x34] = SSE41_OP(pmovzxwq),
3430 [0x35] = SSE41_OP(pmovzxdq),
3431 [0x37] = SSE42_OP(pcmpgtq),
3432 [0x38] = SSE41_OP(pminsb),
3433 [0x39] = SSE41_OP(pminsd),
3434 [0x3a] = SSE41_OP(pminuw),
3435 [0x3b] = SSE41_OP(pminud),
3436 [0x3c] = SSE41_OP(pmaxsb),
3437 [0x3d] = SSE41_OP(pmaxsd),
3438 [0x3e] = SSE41_OP(pmaxuw),
3439 [0x3f] = SSE41_OP(pmaxud),
3440 [0x40] = SSE41_OP(pmulld),
3441 [0x41] = SSE41_OP(phminposuw),
3442};
3443
3444static struct sse_op_helper_s sse_op_table7[256] = {
3445 [0x08] = SSE41_OP(roundps),
3446 [0x09] = SSE41_OP(roundpd),
3447 [0x0a] = SSE41_OP(roundss),
3448 [0x0b] = SSE41_OP(roundsd),
3449 [0x0c] = SSE41_OP(blendps),
3450 [0x0d] = SSE41_OP(blendpd),
3451 [0x0e] = SSE41_OP(pblendw),
3452 [0x0f] = SSSE3_OP(palignr),
3453 [0x14] = SSE41_SPECIAL, /* pextrb */
3454 [0x15] = SSE41_SPECIAL, /* pextrw */
3455 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3456 [0x17] = SSE41_SPECIAL, /* extractps */
3457 [0x20] = SSE41_SPECIAL, /* pinsrb */
3458 [0x21] = SSE41_SPECIAL, /* insertps */
3459 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3460 [0x40] = SSE41_OP(dpps),
3461 [0x41] = SSE41_OP(dppd),
3462 [0x42] = SSE41_OP(mpsadbw),
3463 [0x60] = SSE42_OP(pcmpestrm),
3464 [0x61] = SSE42_OP(pcmpestri),
3465 [0x62] = SSE42_OP(pcmpistrm),
3466 [0x63] = SSE42_OP(pcmpistri),
3467};
3468
3469static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3470{
3471 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3472 int modrm, mod, rm, reg, reg_addr, offset_addr;
3473 void *sse_op2;
3474
3475 b &= 0xff;
3476 if (s->prefix & PREFIX_DATA)
3477 b1 = 1;
3478 else if (s->prefix & PREFIX_REPZ)
3479 b1 = 2;
3480 else if (s->prefix & PREFIX_REPNZ)
3481 b1 = 3;
3482 else
3483 b1 = 0;
3484 sse_op2 = sse_op_table1[b][b1];
3485 if (!sse_op2)
3486 goto illegal_op;
3487 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3488 is_xmm = 1;
3489 } else {
3490 if (b1 == 0) {
3491 /* MMX case */
3492 is_xmm = 0;
3493 } else {
3494 is_xmm = 1;
3495 }
3496 }
3497 /* simple MMX/SSE operation */
3498 if (s->flags & HF_TS_MASK) {
3499 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3500 return;
3501 }
3502 if (s->flags & HF_EM_MASK) {
3503 illegal_op:
3504 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3505 return;
3506 }
3507 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3508 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3509 goto illegal_op;
3510 if (b == 0x0e) {
3511 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3512 goto illegal_op;
3513 /* femms */
3514 tcg_gen_helper_0_0(helper_emms);
3515 return;
3516 }
3517 if (b == 0x77) {
3518 /* emms */
3519 tcg_gen_helper_0_0(helper_emms);
3520 return;
3521 }
3522 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3523 the static cpu state) */
3524 if (!is_xmm) {
3525 tcg_gen_helper_0_0(helper_enter_mmx);
3526 }
3527
3528 modrm = ldub_code(s->pc++);
3529 reg = ((modrm >> 3) & 7);
3530 if (is_xmm)
3531 reg |= rex_r;
3532 mod = (modrm >> 6) & 3;
3533 if (sse_op2 == SSE_SPECIAL) {
3534 b |= (b1 << 8);
3535 switch(b) {
3536 case 0x0e7: /* movntq */
3537 if (mod == 3)
3538 goto illegal_op;
3539 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3540 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3541 break;
3542 case 0x1e7: /* movntdq */
3543 case 0x02b: /* movntps */
3544        case 0x12b: /* movntpd */
3545 case 0x3f0: /* lddqu */
3546 if (mod == 3)
3547 goto illegal_op;
3548 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3549 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3550 break;
3551 case 0x6e: /* movd mm, ea */
3552#ifdef TARGET_X86_64
3553 if (s->dflag == 2) {
3554 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3555 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3556 } else
3557#endif
3558 {
3559 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3560 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3561 offsetof(CPUX86State,fpregs[reg].mmx));
3562 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3563 }
3564 break;
3565 case 0x16e: /* movd xmm, ea */
3566#ifdef TARGET_X86_64
3567 if (s->dflag == 2) {
3568 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3569 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3570 offsetof(CPUX86State,xmm_regs[reg]));
3571 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3572 } else
3573#endif
3574 {
3575 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3576 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3577 offsetof(CPUX86State,xmm_regs[reg]));
3578 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3579 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3580 }
3581 break;
3582 case 0x6f: /* movq mm, ea */
3583 if (mod != 3) {
3584 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3585 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3586 } else {
3587 rm = (modrm & 7);
3588 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3589 offsetof(CPUX86State,fpregs[rm].mmx));
3590 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3591 offsetof(CPUX86State,fpregs[reg].mmx));
3592 }
3593 break;
3594 case 0x010: /* movups */
3595 case 0x110: /* movupd */
3596 case 0x028: /* movaps */
3597 case 0x128: /* movapd */
3598 case 0x16f: /* movdqa xmm, ea */
3599 case 0x26f: /* movdqu xmm, ea */
3600 if (mod != 3) {
3601 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3602 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3603 } else {
3604 rm = (modrm & 7) | REX_B(s);
3605 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3606 offsetof(CPUX86State,xmm_regs[rm]));
3607 }
3608 break;
3609 case 0x210: /* movss xmm, ea */
3610 if (mod != 3) {
3611 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3612 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3613 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3614 gen_op_movl_T0_0();
3615 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3616 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3617 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3618 } else {
3619 rm = (modrm & 7) | REX_B(s);
3620 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3621 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3622 }
3623 break;
3624 case 0x310: /* movsd xmm, ea */
3625 if (mod != 3) {
3626 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3627 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3628 gen_op_movl_T0_0();
3629 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3630 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3631 } else {
3632 rm = (modrm & 7) | REX_B(s);
3633 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3634 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3635 }
3636 break;
3637 case 0x012: /* movlps */
3638 case 0x112: /* movlpd */
3639 if (mod != 3) {
3640 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3641 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3642 } else {
3643 /* movhlps */
3644 rm = (modrm & 7) | REX_B(s);
3645 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3646 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3647 }
3648 break;
3649 case 0x212: /* movsldup */
3650 if (mod != 3) {
3651 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3652 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3653 } else {
3654 rm = (modrm & 7) | REX_B(s);
3655 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3656 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3657 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3658 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3659 }
3660 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3661 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3662 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3663 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3664 break;
3665 case 0x312: /* movddup */
3666 if (mod != 3) {
3667 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3668 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3669 } else {
3670 rm = (modrm & 7) | REX_B(s);
3671 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3672 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3673 }
3674 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3675 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3676 break;
3677 case 0x016: /* movhps */
3678 case 0x116: /* movhpd */
3679 if (mod != 3) {
3680 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3681 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3682 } else {
3683 /* movlhps */
3684 rm = (modrm & 7) | REX_B(s);
3685 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3686 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3687 }
3688 break;
3689 case 0x216: /* movshdup */
3690 if (mod != 3) {
3691 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3692 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3693 } else {
3694 rm = (modrm & 7) | REX_B(s);
3695 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3696 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3697 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3698 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3699 }
3700 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3701 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3702 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3703 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3704 break;
3705 case 0x7e: /* movd ea, mm */
3706#ifdef TARGET_X86_64
3707 if (s->dflag == 2) {
3708 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3709 offsetof(CPUX86State,fpregs[reg].mmx));
3710 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3711 } else
3712#endif
3713 {
3714 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3715 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3716 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3717 }
3718 break;
3719 case 0x17e: /* movd ea, xmm */
3720#ifdef TARGET_X86_64
3721 if (s->dflag == 2) {
3722 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3723 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3724 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3725 } else
3726#endif
3727 {
3728 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3729 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3730 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3731 }
3732 break;
3733 case 0x27e: /* movq xmm, ea */
3734 if (mod != 3) {
3735 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3736 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3737 } else {
3738 rm = (modrm & 7) | REX_B(s);
3739 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3740 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3741 }
3742 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3743 break;
3744 case 0x7f: /* movq ea, mm */
3745 if (mod != 3) {
3746 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3747 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3748 } else {
3749 rm = (modrm & 7);
3750 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3751 offsetof(CPUX86State,fpregs[reg].mmx));
3752 }
3753 break;
3754 case 0x011: /* movups */
3755 case 0x111: /* movupd */
3756 case 0x029: /* movaps */
3757 case 0x129: /* movapd */
3758 case 0x17f: /* movdqa ea, xmm */
3759 case 0x27f: /* movdqu ea, xmm */
3760 if (mod != 3) {
3761 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3762 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3763 } else {
3764 rm = (modrm & 7) | REX_B(s);
3765 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3766 offsetof(CPUX86State,xmm_regs[reg]));
3767 }
3768 break;
3769 case 0x211: /* movss ea, xmm */
3770 if (mod != 3) {
3771 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3772 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3773 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3774 } else {
3775 rm = (modrm & 7) | REX_B(s);
3776 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3777 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3778 }
3779 break;
3780 case 0x311: /* movsd ea, xmm */
3781 if (mod != 3) {
3782 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3783 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3784 } else {
3785 rm = (modrm & 7) | REX_B(s);
3786 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3787 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3788 }
3789 break;
3790 case 0x013: /* movlps */
3791 case 0x113: /* movlpd */
3792 if (mod != 3) {
3793 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3794 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3795 } else {
3796 goto illegal_op;
3797 }
3798 break;
3799 case 0x017: /* movhps */
3800 case 0x117: /* movhpd */
3801 if (mod != 3) {
3802 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3803 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3804 } else {
3805 goto illegal_op;
3806 }
3807 break;
3808 case 0x71: /* shift mm, im */
3809 case 0x72:
3810 case 0x73:
3811 case 0x171: /* shift xmm, im */
3812 case 0x172:
3813 case 0x173:
3814 val = ldub_code(s->pc++);
3815 if (is_xmm) {
3816 gen_op_movl_T0_im(val);
3817 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3818 gen_op_movl_T0_0();
3819 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3820 op1_offset = offsetof(CPUX86State,xmm_t0);
3821 } else {
3822 gen_op_movl_T0_im(val);
3823 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3824 gen_op_movl_T0_0();
3825 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3826 op1_offset = offsetof(CPUX86State,mmx_t0);
3827 }
3828 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3829 if (!sse_op2)
3830 goto illegal_op;
3831 if (is_xmm) {
3832 rm = (modrm & 7) | REX_B(s);
3833 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3834 } else {
3835 rm = (modrm & 7);
3836 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3837 }
3838 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3839 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3840 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3841 break;
3842 case 0x050: /* movmskps */
3843 rm = (modrm & 7) | REX_B(s);
3844 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3845 offsetof(CPUX86State,xmm_regs[rm]));
3846 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3847 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3848 gen_op_mov_reg_T0(OT_LONG, reg);
3849 break;
3850 case 0x150: /* movmskpd */
3851 rm = (modrm & 7) | REX_B(s);
3852 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3853 offsetof(CPUX86State,xmm_regs[rm]));
3854 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3855 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3856 gen_op_mov_reg_T0(OT_LONG, reg);
3857 break;
3858 case 0x02a: /* cvtpi2ps */
3859 case 0x12a: /* cvtpi2pd */
3860 tcg_gen_helper_0_0(helper_enter_mmx);
3861 if (mod != 3) {
3862 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3863 op2_offset = offsetof(CPUX86State,mmx_t0);
3864 gen_ldq_env_A0(s->mem_index, op2_offset);
3865 } else {
3866 rm = (modrm & 7);
3867 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3868 }
3869 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3870 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3871 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3872 switch(b >> 8) {
3873 case 0x0:
3874 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3875 break;
3876 default:
3877 case 0x1:
3878 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3879 break;
3880 }
3881 break;
3882 case 0x22a: /* cvtsi2ss */
3883 case 0x32a: /* cvtsi2sd */
3884 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3885 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3886 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3887 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3888 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3889 if (ot == OT_LONG) {
3890 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3891 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3892 } else {
3893 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3894 }
3895 break;
3896 case 0x02c: /* cvttps2pi */
3897 case 0x12c: /* cvttpd2pi */
3898 case 0x02d: /* cvtps2pi */
3899 case 0x12d: /* cvtpd2pi */
3900 tcg_gen_helper_0_0(helper_enter_mmx);
3901 if (mod != 3) {
3902 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3903 op2_offset = offsetof(CPUX86State,xmm_t0);
3904 gen_ldo_env_A0(s->mem_index, op2_offset);
3905 } else {
3906 rm = (modrm & 7) | REX_B(s);
3907 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3908 }
3909 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3910 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3911 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3912 switch(b) {
3913 case 0x02c:
3914 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3915 break;
3916 case 0x12c:
3917 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3918 break;
3919 case 0x02d:
3920 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3921 break;
3922 case 0x12d:
3923 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3924 break;
3925 }
3926 break;
3927 case 0x22c: /* cvttss2si */
3928 case 0x32c: /* cvttsd2si */
3929 case 0x22d: /* cvtss2si */
3930 case 0x32d: /* cvtsd2si */
3931 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3932 if (mod != 3) {
3933 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3934 if ((b >> 8) & 1) {
3935 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3936 } else {
3937 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3938 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3939 }
3940 op2_offset = offsetof(CPUX86State,xmm_t0);
3941 } else {
3942 rm = (modrm & 7) | REX_B(s);
3943 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3944 }
3945 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3946 (b & 1) * 4];
3947 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3948 if (ot == OT_LONG) {
3949 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3950 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3951 } else {
3952 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3953 }
3954 gen_op_mov_reg_T0(ot, reg);
3955 break;
3956 case 0xc4: /* pinsrw */
3957 case 0x1c4:
3958 s->rip_offset = 1;
3959 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3960 val = ldub_code(s->pc++);
3961 if (b1) {
3962 val &= 7;
3963 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3964 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3965 } else {
3966 val &= 3;
3967 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3968 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3969 }
3970 break;
3971 case 0xc5: /* pextrw */
3972 case 0x1c5:
3973 if (mod != 3)
3974 goto illegal_op;
3975 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3976 val = ldub_code(s->pc++);
3977 if (b1) {
3978 val &= 7;
3979 rm = (modrm & 7) | REX_B(s);
3980 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3981 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3982 } else {
3983 val &= 3;
3984 rm = (modrm & 7);
3985 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3986 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3987 }
3988 reg = ((modrm >> 3) & 7) | rex_r;
3989 gen_op_mov_reg_T0(ot, reg);
3990 break;
3991 case 0x1d6: /* movq ea, xmm */
3992 if (mod != 3) {
3993 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3994 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3995 } else {
3996 rm = (modrm & 7) | REX_B(s);
3997 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3998 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3999 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4000 }
4001 break;
4002 case 0x2d6: /* movq2dq */
4003 tcg_gen_helper_0_0(helper_enter_mmx);
4004 rm = (modrm & 7);
4005 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4006 offsetof(CPUX86State,fpregs[rm].mmx));
4007 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4008 break;
4009 case 0x3d6: /* movdq2q */
4010 tcg_gen_helper_0_0(helper_enter_mmx);
4011 rm = (modrm & 7) | REX_B(s);
4012 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4013 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4014 break;
4015 case 0xd7: /* pmovmskb */
4016 case 0x1d7:
4017 if (mod != 3)
4018 goto illegal_op;
4019 if (b1) {
4020 rm = (modrm & 7) | REX_B(s);
4021 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4022 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4023 } else {
4024 rm = (modrm & 7);
4025 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4026 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4027 }
4028 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4029 reg = ((modrm >> 3) & 7) | rex_r;
4030 gen_op_mov_reg_T0(OT_LONG, reg);
4031 break;
4032 case 0x138:
4033 if (s->prefix & PREFIX_REPNZ)
4034 goto crc32;
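 /* fall through to the 0x038 handling when there is no F2 prefix */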
4035 case 0x038:
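 /* For 0F 38 xx the byte fetched as 'modrm' earlier is really the third
    opcode byte: take it as b, then re-read the real ModRM. */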
4036 b = modrm;
4037 modrm = ldub_code(s->pc++);
4038 rm = modrm & 7;
4039 reg = ((modrm >> 3) & 7) | rex_r;
4040 mod = (modrm >> 6) & 3;
4041
4042 sse_op2 = sse_op_table6[b].op[b1];
4043 if (!sse_op2)
4044 goto illegal_op;
4045 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4046 goto illegal_op;
4047
4048 if (b1) {
4049 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4050 if (mod == 3) {
4051 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4052 } else {
4053 op2_offset = offsetof(CPUX86State,xmm_t0);
4054 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4055 switch (b) {
4056 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4057 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4058 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4059 gen_ldq_env_A0(s->mem_index, op2_offset +
4060 offsetof(XMMReg, XMM_Q(0)));
4061 break;
4062 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4063 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4064 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4065 (s->mem_index >> 2) - 1);
4066 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4067 offsetof(XMMReg, XMM_L(0)));
4068 break;
4069 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4070 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4071 (s->mem_index >> 2) - 1);
4072 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4073 offsetof(XMMReg, XMM_W(0)));
4074 break;
4075 case 0x2a: /* movntdqa */
4076 gen_ldo_env_A0(s->mem_index, op1_offset);
4077 return;
4078 default:
4079 gen_ldo_env_A0(s->mem_index, op2_offset);
4080 }
4081 }
4082 } else {
4083 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4084 if (mod == 3) {
4085 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4086 } else {
4087 op2_offset = offsetof(CPUX86State,mmx_t0);
4088 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4089 gen_ldq_env_A0(s->mem_index, op2_offset);
4090 }
4091 }
4092 if (sse_op2 == SSE_SPECIAL)
4093 goto illegal_op;
4094
4095 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4096 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4097 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4098
4099 if (b == 0x17)
4100 s->cc_op = CC_OP_EFLAGS;
4101 break;
4102 case 0x338: /* crc32 */
4103 crc32:
4104 b = modrm;
4105 modrm = ldub_code(s->pc++);
4106 reg = ((modrm >> 3) & 7) | rex_r;
4107
4108 if (b != 0xf0 && b != 0xf1)
4109 goto illegal_op;
4110 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4111 goto illegal_op;
4112
4113 if (b == 0xf0)
4114 ot = OT_BYTE;
4115 else if (b == 0xf1 && s->dflag != 2) {
4116 if (s->prefix & PREFIX_DATA)
4117 ot = OT_WORD;
4118 else
4119 ot = OT_LONG;
4120 } else
4121 ot = OT_QUAD;
4122
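 /* 8 << ot converts the operand size (OT_BYTE..OT_QUAD) into the
    operand width in bits for the crc32 helper. */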
4123 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4124 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4125 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4126 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4127 cpu_T[0], tcg_const_i32(8 << ot));
4128
4129 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4130 gen_op_mov_reg_T0(ot, reg);
4131 break;
4132 case 0x03a:
4133 case 0x13a:
4134 b = modrm;
4135 modrm = ldub_code(s->pc++);
4136 rm = modrm & 7;
4137 reg = ((modrm >> 3) & 7) | rex_r;
4138 mod = (modrm >> 6) & 3;
4139
4140 sse_op2 = sse_op_table7[b].op[b1];
4141 if (!sse_op2)
4142 goto illegal_op;
4143 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4144 goto illegal_op;
4145
4146 if (sse_op2 == SSE_SPECIAL) {
4147 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4148 rm = (modrm & 7) | REX_B(s);
4149 if (mod != 3)
4150 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4151 reg = ((modrm >> 3) & 7) | rex_r;
4152 val = ldub_code(s->pc++);
4153 switch (b) {
4154 case 0x14: /* pextrb */
4155 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4156 xmm_regs[reg].XMM_B(val & 15)));
4157 if (mod == 3)
4158 gen_op_mov_reg_T0(ot, rm);
4159 else
4160 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4161 (s->mem_index >> 2) - 1);
4162 break;
4163 case 0x15: /* pextrw */
4164 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4165 xmm_regs[reg].XMM_W(val & 7)));
4166 if (mod == 3)
4167 gen_op_mov_reg_T0(ot, rm);
4168 else
4169 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4170 (s->mem_index >> 2) - 1);
4171 break;
4172 case 0x16:
4173 if (ot == OT_LONG) { /* pextrd */
4174 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4175 offsetof(CPUX86State,
4176 xmm_regs[reg].XMM_L(val & 3)));
4177 if (mod == 3)
4178 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4179 else
4180 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4181 (s->mem_index >> 2) - 1);
4182 } else { /* pextrq */
4183 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4184 offsetof(CPUX86State,
4185 xmm_regs[reg].XMM_Q(val & 1)));
4186 if (mod == 3)
4187 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4188 else
4189 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4190 (s->mem_index >> 2) - 1);
4191 }
4192 break;
4193 case 0x17: /* extractps */
4194 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4195 xmm_regs[reg].XMM_L(val & 3)));
4196 if (mod == 3)
4197 gen_op_mov_reg_T0(ot, rm);
4198 else
4199 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4200 (s->mem_index >> 2) - 1);
4201 break;
4202 case 0x20: /* pinsrb */
4203 if (mod == 3)
4204 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4205 else
4206 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4207 (s->mem_index >> 2) - 1);
4208 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4209 xmm_regs[reg].XMM_B(val & 15)));
4210 break;
4211 case 0x21: /* insertps */
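 /* insertps imm8: bits 7:6 select the source dword (register form),
    bits 5:4 the destination dword, bits 3:0 a zero mask. */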
4212 if (mod == 3)
4213 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4214 offsetof(CPUX86State,xmm_regs[rm]
4215 .XMM_L((val >> 6) & 3)));
4216 else
4217 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4218 (s->mem_index >> 2) - 1);
4219 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4220 offsetof(CPUX86State,xmm_regs[reg]
4221 .XMM_L((val >> 4) & 3)));
4222 if ((val >> 0) & 1)
4223 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4224 cpu_env, offsetof(CPUX86State,
4225 xmm_regs[reg].XMM_L(0)));
4226 if ((val >> 1) & 1)
4227 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4228 cpu_env, offsetof(CPUX86State,
4229 xmm_regs[reg].XMM_L(1)));
4230 if ((val >> 2) & 1)
4231 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4232 cpu_env, offsetof(CPUX86State,
4233 xmm_regs[reg].XMM_L(2)));
4234 if ((val >> 3) & 1)
4235 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4236 cpu_env, offsetof(CPUX86State,
4237 xmm_regs[reg].XMM_L(3)));
4238 break;
4239 case 0x22:
4240 if (ot == OT_LONG) { /* pinsrd */
4241 if (mod == 3)
4242 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4243 else
4244 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4245 (s->mem_index >> 2) - 1);
4246 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4247 offsetof(CPUX86State,
4248 xmm_regs[reg].XMM_L(val & 3)));
4249 } else { /* pinsrq */
4250 if (mod == 3)
4251 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4252 else
4253 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4254 (s->mem_index >> 2) - 1);
4255 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4256 offsetof(CPUX86State,
4257 xmm_regs[reg].XMM_Q(val & 1)));
4258 }
4259 break;
4260 }
4261 return;
4262 }
4263
4264 if (b1) {
4265 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4266 if (mod == 3) {
4267 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4268 } else {
4269 op2_offset = offsetof(CPUX86State,xmm_t0);
4270 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4271 gen_ldo_env_A0(s->mem_index, op2_offset);
4272 }
4273 } else {
4274 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4275 if (mod == 3) {
4276 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4277 } else {
4278 op2_offset = offsetof(CPUX86State,mmx_t0);
4279 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4280 gen_ldq_env_A0(s->mem_index, op2_offset);
4281 }
4282 }
4283 val = ldub_code(s->pc++);
4284
4285 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4286 s->cc_op = CC_OP_EFLAGS;
4287
4288 if (s->dflag == 2)
4289 /* the helper must operate on the full 64-bit gp registers */
4290 val |= 1 << 8;
4291 }
4292
4293 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4294 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4295 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4296 break;
4297 default:
4298 goto illegal_op;
4299 }
4300 } else {
4301 /* generic MMX or SSE operation */
4302 switch(b) {
4303 case 0x70: /* pshufx insn */
4304 case 0xc6: /* shufps/shufpd */
4305 case 0xc2: /* compare insns */
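 /* these opcodes carry a trailing imm8; record that in rip_offset so
    RIP-relative addressing accounts for the extra byte */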
4306 s->rip_offset = 1;
4307 break;
4308 default:
4309 break;
4310 }
4311 if (is_xmm) {
4312 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4313 if (mod != 3) {
4314 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4315 op2_offset = offsetof(CPUX86State,xmm_t0);
4316 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4317 b == 0xc2)) {
4318 /* special case for scalar SSE instructions: only the low element is loaded */
4319 if (b1 == 2) {
4320 /* 32 bit access */
4321 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4322 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4323 } else {
4324 /* 64 bit access */
4325 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4326 }
4327 } else {
4328 gen_ldo_env_A0(s->mem_index, op2_offset);
4329 }
4330 } else {
4331 rm = (modrm & 7) | REX_B(s);
4332 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4333 }
4334 } else {
4335 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4336 if (mod != 3) {
4337 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4338 op2_offset = offsetof(CPUX86State,mmx_t0);
4339 gen_ldq_env_A0(s->mem_index, op2_offset);
4340 } else {
4341 rm = (modrm & 7);
4342 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4343 }
4344 }
4345 switch(b) {
4346 case 0x0f: /* 3DNow! data insns */
4347 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4348 goto illegal_op;
4349 val = ldub_code(s->pc++);
4350 sse_op2 = sse_op_table5[val];
4351 if (!sse_op2)
4352 goto illegal_op;
4353 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4354 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4355 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4356 break;
4357 case 0x70: /* pshufx insn */
4358 case 0xc6: /* shufps/shufpd */
4359 val = ldub_code(s->pc++);
4360 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4361 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4362 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4363 break;
4364 case 0xc2:
4365 /* compare insns */
4366 val = ldub_code(s->pc++);
4367 if (val >= 8)
4368 goto illegal_op;
4369 sse_op2 = sse_op_table4[val][b1];
4370 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4371 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4372 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4373 break;
4374 case 0xf7:
4375 /* maskmov: we must prepare A0 (implicit DS:rDI destination) */
4376 if (mod != 3)
4377 goto illegal_op;
4378#ifdef TARGET_X86_64
4379 if (s->aflag == 2) {
4380 gen_op_movq_A0_reg(R_EDI);
4381 } else
4382#endif
4383 {
4384 gen_op_movl_A0_reg(R_EDI);
4385 if (s->aflag == 0)
4386 gen_op_andl_A0_ffff();
4387 }
4388 gen_add_A0_ds_seg(s);
4389
4390 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4391 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4392 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4393 break;
4394 default:
4395 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4396 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4397 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4398 break;
4399 }
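 /* the (u)comis variants (0x2e/0x2f) set EFLAGS directly */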
4400 if (b == 0x2e || b == 0x2f) {
4401 s->cc_op = CC_OP_EFLAGS;
4402 }
4403 }
4404}
4405
4406#ifdef VBOX
4407 /* Checks whether this is an invalid LOCK sequence. Only a few instructions
4408 can be used together with the LOCK prefix, and of those only the forms
4409 that write to a memory operand are valid. So this is rather tedious
4410 work to check...
4411 The AMD manual lists the following instructions.
4412 ADC
4413 ADD
4414 AND
4415 BTC
4416 BTR
4417 BTS
4418 CMPXCHG
4419 CMPXCHG8B
4420 CMPXCHG16B
4421 DEC
4422 INC
4423 NEG
4424 NOT
4425 OR
4426 SBB
4427 SUB
4428 XADD
4429 XCHG
4430 XOR */
4431static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4432{
4433 target_ulong pc = s->pc;
4434 int modrm, mod, op;
4435
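 /* Example: F0 01 03   lock add [ebx], eax   -> valid
             F0 01 C3   lock add ebx, eax     -> #UD (register destination)
             F0 90      lock nop              -> #UD (not a lockable insn) */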
4436 /* X={8,16,32,64} Y={16,32,64} */
4437 switch (b)
4438 {
4439 /* /2: ADC reg/memX, immX */
4440 /* /0: ADD reg/memX, immX */
4441 /* /4: AND reg/memX, immX */
4442 /* /1: OR reg/memX, immX */
4443 /* /3: SBB reg/memX, immX */
4444 /* /5: SUB reg/memX, immX */
4445 /* /6: XOR reg/memX, immX */
4446 case 0x80:
4447 case 0x81:
4448 case 0x83:
4449 modrm = ldub_code(pc++);
4450 op = (modrm >> 3) & 7;
4451 if (op == 7) /* /7: CMP */
4452 break;
4453 mod = (modrm >> 6) & 3;
4454 if (mod == 3) /* register destination */
4455 break;
4456 return false;
4457
4458 case 0x10: /* /r: ADC reg/mem8, reg8 */
4459 case 0x11: /* /r: ADC reg/memY, regY */
4460 case 0x00: /* /r: ADD reg/mem8, reg8 */
4461 case 0x01: /* /r: ADD reg/memY, regY */
4462 case 0x20: /* /r: AND reg/mem8, reg8 */
4463 case 0x21: /* /r: AND reg/memY, regY */
4464 case 0x08: /* /r: OR reg/mem8, reg8 */
4465 case 0x09: /* /r: OR reg/memY, regY */
4466 case 0x18: /* /r: SBB reg/mem8, reg8 */
4467 case 0x19: /* /r: SBB reg/memY, regY */
4468 case 0x28: /* /r: SUB reg/mem8, reg8 */
4469 case 0x29: /* /r: SUB reg/memY, regY */
4470 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4471 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4472 case 0x30: /* /r: XOR reg/mem8, reg8 */
4473 case 0x31: /* /r: XOR reg/memY, regY */
4474 modrm = ldub_code(pc++);
4475 mod = (modrm >> 6) & 3;
4476 if (mod == 3) /* register destination */
4477 break;
4478 return false;
4479
4480 /* /1: DEC reg/memX */
4481 /* /0: INC reg/memX */
4482 case 0xfe:
4483 case 0xff:
4484 modrm = ldub_code(pc++);
4485 mod = (modrm >> 6) & 3;
4486 if (mod == 3) /* register destination */
4487 break;
4488 return false;
4489
4490 /* /3: NEG reg/memX */
4491 /* /2: NOT reg/memX */
4492 case 0xf6:
4493 case 0xf7:
4494 modrm = ldub_code(pc++);
4495 mod = (modrm >> 6) & 3;
4496 if (mod == 3) /* register destination */
4497 break;
4498 return false;
4499
4500 case 0x0f:
4501 b = ldub_code(pc++);
4502 switch (b)
4503 {
4504 /* /7: BTC reg/memY, imm8 */
4505 /* /6: BTR reg/memY, imm8 */
4506 /* /5: BTS reg/memY, imm8 */
4507 case 0xba:
4508 modrm = ldub_code(pc++);
4509 op = (modrm >> 3) & 7;
4510 if (op < 5)
4511 break;
4512 mod = (modrm >> 6) & 3;
4513 if (mod == 3) /* register destination */
4514 break;
4515 return false;
4516
4517 case 0xbb: /* /r: BTC reg/memY, regY */
4518 case 0xb3: /* /r: BTR reg/memY, regY */
4519 case 0xab: /* /r: BTS reg/memY, regY */
4520 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4521 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4522 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4523 case 0xc1: /* /r: XADD reg/memY, regY */
4524 modrm = ldub_code(pc++);
4525 mod = (modrm >> 6) & 3;
4526 if (mod == 3) /* register destination */
4527 break;
4528 return false;
4529
4530 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4531 case 0xc7:
4532 modrm = ldub_code(pc++);
4533 op = (modrm >> 3) & 7;
4534 if (op != 1)
4535 break;
4536 return false;
4537 }
4538 break;
4539 }
4540
4541 /* Illegal sequence. s->pc is past the LOCK prefix and that
4542 is sufficient for the TB, I think. */
4543 Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
4544 return true;
4545}
4546#endif /* VBOX */
4547
4548
4549/* convert one instruction. s->is_jmp is set if the translation must
4550 be stopped. Return the next pc value */
4551static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4552{
4553 int b, prefixes, aflag, dflag;
4554 int shift, ot;
4555 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4556 target_ulong next_eip, tval;
4557 int rex_w, rex_r;
4558
4559 if (unlikely(loglevel & CPU_LOG_TB_OP))
4560 tcg_gen_debug_insn_start(pc_start);
4561 s->pc = pc_start;
4562 prefixes = 0;
4563 aflag = s->code32;
4564 dflag = s->code32;
4565 s->override = -1;
4566 rex_w = -1;
4567 rex_r = 0;
4568#ifdef TARGET_X86_64
4569 s->rex_x = 0;
4570 s->rex_b = 0;
4571 x86_64_hregs = 0;
4572#endif
4573 s->rip_offset = 0; /* for relative ip address */
4574#ifdef VBOX
4575 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4576 gen_update_eip(pc_start - s->cs_base);
4577#endif
4578 next_byte:
4579 b = ldub_code(s->pc);
4580 s->pc++;
4581 /* check prefixes */
4582#ifdef TARGET_X86_64
4583 if (CODE64(s)) {
4584 switch (b) {
4585 case 0xf3:
4586 prefixes |= PREFIX_REPZ;
4587 goto next_byte;
4588 case 0xf2:
4589 prefixes |= PREFIX_REPNZ;
4590 goto next_byte;
4591 case 0xf0:
4592 prefixes |= PREFIX_LOCK;
4593 goto next_byte;
4594 case 0x2e:
4595 s->override = R_CS;
4596 goto next_byte;
4597 case 0x36:
4598 s->override = R_SS;
4599 goto next_byte;
4600 case 0x3e:
4601 s->override = R_DS;
4602 goto next_byte;
4603 case 0x26:
4604 s->override = R_ES;
4605 goto next_byte;
4606 case 0x64:
4607 s->override = R_FS;
4608 goto next_byte;
4609 case 0x65:
4610 s->override = R_GS;
4611 goto next_byte;
4612 case 0x66:
4613 prefixes |= PREFIX_DATA;
4614 goto next_byte;
4615 case 0x67:
4616 prefixes |= PREFIX_ADR;
4617 goto next_byte;
4618 case 0x40 ... 0x4f:
4619 /* REX prefix */
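 /* REX is 0100WRXB: W selects 64-bit operand size; R, X and B become
    bit 3 of the ModRM reg, SIB index and ModRM rm/SIB base register
    numbers, hence the shifts below. */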
4620 rex_w = (b >> 3) & 1;
4621 rex_r = (b & 0x4) << 1;
4622 s->rex_x = (b & 0x2) << 2;
4623 REX_B(s) = (b & 0x1) << 3;
4624 x86_64_hregs = 1; /* select uniform byte register addressing */
4625 goto next_byte;
4626 }
4627 if (rex_w == 1) {
4628 /* 0x66 is ignored if rex.w is set */
4629 dflag = 2;
4630 } else {
4631 if (prefixes & PREFIX_DATA)
4632 dflag ^= 1;
4633 }
4634 if (!(prefixes & PREFIX_ADR))
4635 aflag = 2;
4636 } else
4637#endif
4638 {
4639 switch (b) {
4640 case 0xf3:
4641 prefixes |= PREFIX_REPZ;
4642 goto next_byte;
4643 case 0xf2:
4644 prefixes |= PREFIX_REPNZ;
4645 goto next_byte;
4646 case 0xf0:
4647 prefixes |= PREFIX_LOCK;
4648 goto next_byte;
4649 case 0x2e:
4650 s->override = R_CS;
4651 goto next_byte;
4652 case 0x36:
4653 s->override = R_SS;
4654 goto next_byte;
4655 case 0x3e:
4656 s->override = R_DS;
4657 goto next_byte;
4658 case 0x26:
4659 s->override = R_ES;
4660 goto next_byte;
4661 case 0x64:
4662 s->override = R_FS;
4663 goto next_byte;
4664 case 0x65:
4665 s->override = R_GS;
4666 goto next_byte;
4667 case 0x66:
4668 prefixes |= PREFIX_DATA;
4669 goto next_byte;
4670 case 0x67:
4671 prefixes |= PREFIX_ADR;
4672 goto next_byte;
4673 }
4674 if (prefixes & PREFIX_DATA)
4675 dflag ^= 1;
4676 if (prefixes & PREFIX_ADR)
4677 aflag ^= 1;
4678 }
4679
4680 s->prefix = prefixes;
4681 s->aflag = aflag;
4682 s->dflag = dflag;
4683
4684 /* lock generation */
4685#ifndef VBOX
4686 if (prefixes & PREFIX_LOCK)
4687 tcg_gen_helper_0_0(helper_lock);
4688#else /* VBOX */
4689 if (prefixes & PREFIX_LOCK) {
4690 if (is_invalid_lock_sequence(s, pc_start, b)) {
4691 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4692 return s->pc;
4693 }
4694 tcg_gen_helper_0_0(helper_lock);
4695 }
4696#endif /* VBOX */
4697
4698 /* now check op code */
4699 reswitch:
4700 switch(b) {
4701 case 0x0f:
4702 /**************************/
4703 /* extended op code */
4704 b = ldub_code(s->pc++) | 0x100;
4705 goto reswitch;
4706
4707 /**************************/
4708 /* arith & logic */
4709 case 0x00 ... 0x05:
4710 case 0x08 ... 0x0d:
4711 case 0x10 ... 0x15:
4712 case 0x18 ... 0x1d:
4713 case 0x20 ... 0x25:
4714 case 0x28 ... 0x2d:
4715 case 0x30 ... 0x35:
4716 case 0x38 ... 0x3d:
4717 {
4718 int op, f, val;
4719 op = (b >> 3) & 7;
4720 f = (b >> 1) & 3;
4721
4722 if ((b & 1) == 0)
4723 ot = OT_BYTE;
4724 else
4725 ot = dflag + OT_WORD;
4726
4727 switch(f) {
4728 case 0: /* OP Ev, Gv */
4729 modrm = ldub_code(s->pc++);
4730 reg = ((modrm >> 3) & 7) | rex_r;
4731 mod = (modrm >> 6) & 3;
4732 rm = (modrm & 7) | REX_B(s);
4733 if (mod != 3) {
4734 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4735 opreg = OR_TMP0;
4736 } else if (op == OP_XORL && rm == reg) {
4737 xor_zero:
4738 /* xor reg, reg optimisation */
4739 gen_op_movl_T0_0();
4740 s->cc_op = CC_OP_LOGICB + ot;
4741 gen_op_mov_reg_T0(ot, reg);
4742 gen_op_update1_cc();
4743 break;
4744 } else {
4745 opreg = rm;
4746 }
4747 gen_op_mov_TN_reg(ot, 1, reg);
4748 gen_op(s, op, ot, opreg);
4749 break;
4750 case 1: /* OP Gv, Ev */
4751 modrm = ldub_code(s->pc++);
4752 mod = (modrm >> 6) & 3;
4753 reg = ((modrm >> 3) & 7) | rex_r;
4754 rm = (modrm & 7) | REX_B(s);
4755 if (mod != 3) {
4756 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4757 gen_op_ld_T1_A0(ot + s->mem_index);
4758 } else if (op == OP_XORL && rm == reg) {
4759 goto xor_zero;
4760 } else {
4761 gen_op_mov_TN_reg(ot, 1, rm);
4762 }
4763 gen_op(s, op, ot, reg);
4764 break;
4765 case 2: /* OP A, Iv */
4766 val = insn_get(s, ot);
4767 gen_op_movl_T1_im(val);
4768 gen_op(s, op, ot, OR_EAX);
4769 break;
4770 }
4771 }
4772 break;
4773
4774 case 0x82:
4775 if (CODE64(s))
4776 goto illegal_op;
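 /* fall through: outside long mode 0x82 is an alias of 0x80 */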
4777 case 0x80: /* GRP1 */
4778 case 0x81:
4779 case 0x83:
4780 {
4781 int val;
4782
4783 if ((b & 1) == 0)
4784 ot = OT_BYTE;
4785 else
4786 ot = dflag + OT_WORD;
4787
4788 modrm = ldub_code(s->pc++);
4789 mod = (modrm >> 6) & 3;
4790 rm = (modrm & 7) | REX_B(s);
4791 op = (modrm >> 3) & 7;
4792
4793 if (mod != 3) {
4794 if (b == 0x83)
4795 s->rip_offset = 1;
4796 else
4797 s->rip_offset = insn_const_size(ot);
4798 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4799 opreg = OR_TMP0;
4800 } else {
4801 opreg = rm;
4802 }
4803
4804 switch(b) {
4805 default:
4806 case 0x80:
4807 case 0x81:
4808 case 0x82:
4809 val = insn_get(s, ot);
4810 break;
4811 case 0x83:
4812 val = (int8_t)insn_get(s, OT_BYTE);
4813 break;
4814 }
4815 gen_op_movl_T1_im(val);
4816 gen_op(s, op, ot, opreg);
4817 }
4818 break;
4819
4820 /**************************/
4821 /* inc, dec, and other misc arith */
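 /* in 64-bit mode 0x40..0x4f are REX prefixes and were consumed above */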
4822 case 0x40 ... 0x47: /* inc Gv */
4823 ot = dflag ? OT_LONG : OT_WORD;
4824 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4825 break;
4826 case 0x48 ... 0x4f: /* dec Gv */
4827 ot = dflag ? OT_LONG : OT_WORD;
4828 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4829 break;
4830 case 0xf6: /* GRP3 */
4831 case 0xf7:
4832 if ((b & 1) == 0)
4833 ot = OT_BYTE;
4834 else
4835 ot = dflag + OT_WORD;
4836
4837 modrm = ldub_code(s->pc++);
4838 mod = (modrm >> 6) & 3;
4839 rm = (modrm & 7) | REX_B(s);
4840 op = (modrm >> 3) & 7;
4841 if (mod != 3) {
4842 if (op == 0)
4843 s->rip_offset = insn_const_size(ot);
4844 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4845 gen_op_ld_T0_A0(ot + s->mem_index);
4846 } else {
4847 gen_op_mov_TN_reg(ot, 0, rm);
4848 }
4849
4850 switch(op) {
4851 case 0: /* test */
4852 val = insn_get(s, ot);
4853 gen_op_movl_T1_im(val);
4854 gen_op_testl_T0_T1_cc();
4855 s->cc_op = CC_OP_LOGICB + ot;
4856 break;
4857 case 2: /* not */
4858 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4859 if (mod != 3) {
4860 gen_op_st_T0_A0(ot + s->mem_index);
4861 } else {
4862 gen_op_mov_reg_T0(ot, rm);
4863 }
4864 break;
4865 case 3: /* neg */
4866 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4867 if (mod != 3) {
4868 gen_op_st_T0_A0(ot + s->mem_index);
4869 } else {
4870 gen_op_mov_reg_T0(ot, rm);
4871 }
4872 gen_op_update_neg_cc();
4873 s->cc_op = CC_OP_SUBB + ot;
4874 break;
4875 case 4: /* mul */
4876 switch(ot) {
4877 case OT_BYTE:
4878 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4879 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4880 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4881 /* XXX: use 32 bit mul which could be faster */
4882 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4883 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4884 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4885 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4886 s->cc_op = CC_OP_MULB;
4887 break;
4888 case OT_WORD:
4889 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4890 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4891 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4892 /* XXX: use 32 bit mul which could be faster */
4893 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4894 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4895 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4896 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4897 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4898 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4899 s->cc_op = CC_OP_MULW;
4900 break;
4901 default:
4902 case OT_LONG:
4903#ifdef TARGET_X86_64
4904 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4905 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4906 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4907 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4908 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4909 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4910 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4911 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4912 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4913#else
4914 {
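 /* with a 32-bit target_ulong, widen both operands to 64 bits,
    multiply once, then split the product: low half to EAX,
    high half to EDX */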
4915 TCGv t0, t1;
4916 t0 = tcg_temp_new(TCG_TYPE_I64);
4917 t1 = tcg_temp_new(TCG_TYPE_I64);
4918 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4919 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4920 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4921 tcg_gen_mul_i64(t0, t0, t1);
4922 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4923 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4924 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4925 tcg_gen_shri_i64(t0, t0, 32);
4926 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4927 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4928 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4929 }
4930#endif
4931 s->cc_op = CC_OP_MULL;
4932 break;
4933#ifdef TARGET_X86_64
4934 case OT_QUAD:
4935 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4936 s->cc_op = CC_OP_MULQ;
4937 break;
4938#endif
4939 }
4940 break;
4941 case 5: /* imul */
4942 switch(ot) {
4943 case OT_BYTE:
4944 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4945 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4946 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4947 /* XXX: use 32 bit mul which could be faster */
4948 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4949 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4950 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4951 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4952 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4953 s->cc_op = CC_OP_MULB;
4954 break;
4955 case OT_WORD:
4956 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4957 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4958 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4959 /* XXX: use 32 bit mul which could be faster */
4960 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4961 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4962 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4963 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4964 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4965 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4966 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4967 s->cc_op = CC_OP_MULW;
4968 break;
4969 default:
4970 case OT_LONG:
4971#ifdef TARGET_X86_64
4972 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4973 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4974 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4975 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4976 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4977 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4978 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4979 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4980 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4981 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4982#else
4983 {
4984 TCGv t0, t1;
4985 t0 = tcg_temp_new(TCG_TYPE_I64);
4986 t1 = tcg_temp_new(TCG_TYPE_I64);
4987 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4988 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4989 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4990 tcg_gen_mul_i64(t0, t0, t1);
4991 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4992 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4993 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4994 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4995 tcg_gen_shri_i64(t0, t0, 32);
4996 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4997 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4998 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4999 }
5000#endif
5001 s->cc_op = CC_OP_MULL;
5002 break;
5003#ifdef TARGET_X86_64
5004 case OT_QUAD:
5005 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5006 s->cc_op = CC_OP_MULQ;
5007 break;
5008#endif
5009 }
5010 break;
5011 case 6: /* div */
5012 switch(ot) {
5013 case OT_BYTE:
5014 gen_jmp_im(pc_start - s->cs_base);
5015 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5016 break;
5017 case OT_WORD:
5018 gen_jmp_im(pc_start - s->cs_base);
5019 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5020 break;
5021 default:
5022 case OT_LONG:
5023 gen_jmp_im(pc_start - s->cs_base);
5024 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5025 break;
5026#ifdef TARGET_X86_64
5027 case OT_QUAD:
5028 gen_jmp_im(pc_start - s->cs_base);
5029 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5030 break;
5031#endif
5032 }
5033 break;
5034 case 7: /* idiv */
5035 switch(ot) {
5036 case OT_BYTE:
5037 gen_jmp_im(pc_start - s->cs_base);
5038 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5039 break;
5040 case OT_WORD:
5041 gen_jmp_im(pc_start - s->cs_base);
5042 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5043 break;
5044 default:
5045 case OT_LONG:
5046 gen_jmp_im(pc_start - s->cs_base);
5047 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5048 break;
5049#ifdef TARGET_X86_64
5050 case OT_QUAD:
5051 gen_jmp_im(pc_start - s->cs_base);
5052 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5053 break;
5054#endif
5055 }
5056 break;
5057 default:
5058 goto illegal_op;
5059 }
5060 break;
5061
5062 case 0xfe: /* GRP4 */
5063 case 0xff: /* GRP5 */
5064 if ((b & 1) == 0)
5065 ot = OT_BYTE;
5066 else
5067 ot = dflag + OT_WORD;
5068
5069 modrm = ldub_code(s->pc++);
5070 mod = (modrm >> 6) & 3;
5071 rm = (modrm & 7) | REX_B(s);
5072 op = (modrm >> 3) & 7;
5073 if (op >= 2 && b == 0xfe) {
5074 goto illegal_op;
5075 }
5076 if (CODE64(s)) {
5077 if (op == 2 || op == 4) {
5078 /* operand size for jumps is 64 bit */
5079 ot = OT_QUAD;
5080 } else if (op == 3 || op == 5) {
5081 /* for far calls/jumps, the operand is 16 or 32 bit, even
5082 in long mode */
5083 ot = dflag ? OT_LONG : OT_WORD;
5084 } else if (op == 6) {
5085 /* default push size is 64 bit */
5086 ot = dflag ? OT_QUAD : OT_WORD;
5087 }
5088 }
5089 if (mod != 3) {
5090 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5091 if (op >= 2 && op != 3 && op != 5)
5092 gen_op_ld_T0_A0(ot + s->mem_index);
5093 } else {
5094 gen_op_mov_TN_reg(ot, 0, rm);
5095 }
5096
5097 switch(op) {
5098 case 0: /* inc Ev */
5099 if (mod != 3)
5100 opreg = OR_TMP0;
5101 else
5102 opreg = rm;
5103 gen_inc(s, ot, opreg, 1);
5104 break;
5105 case 1: /* dec Ev */
5106 if (mod != 3)
5107 opreg = OR_TMP0;
5108 else
5109 opreg = rm;
5110 gen_inc(s, ot, opreg, -1);
5111 break;
5112 case 2: /* call Ev */
5113 /* XXX: optimize if memory (no 'and' is necessary) */
5114#ifdef VBOX_WITH_CALL_RECORD
5115 if (s->record_call)
5116 gen_op_record_call();
5117#endif
5118 if (s->dflag == 0)
5119 gen_op_andl_T0_ffff();
5120 next_eip = s->pc - s->cs_base;
5121 gen_movtl_T1_im(next_eip);
5122 gen_push_T1(s);
5123 gen_op_jmp_T0();
5124 gen_eob(s);
5125 break;
5126 case 3: /* lcall Ev */
5127 gen_op_ld_T1_A0(ot + s->mem_index);
5128 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5129 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5130 do_lcall:
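 /* at this point T0 holds the new CS selector and T1 the new EIP */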
5131 if (s->pe && !s->vm86) {
5132 if (s->cc_op != CC_OP_DYNAMIC)
5133 gen_op_set_cc_op(s->cc_op);
5134 gen_jmp_im(pc_start - s->cs_base);
5135 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5136 tcg_gen_helper_0_4(helper_lcall_protected,
5137 cpu_tmp2_i32, cpu_T[1],
5138 tcg_const_i32(dflag),
5139 tcg_const_i32(s->pc - pc_start));
5140 } else {
5141 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5142 tcg_gen_helper_0_4(helper_lcall_real,
5143 cpu_tmp2_i32, cpu_T[1],
5144 tcg_const_i32(dflag),
5145 tcg_const_i32(s->pc - s->cs_base));
5146 }
5147 gen_eob(s);
5148 break;
5149 case 4: /* jmp Ev */
5150 if (s->dflag == 0)
5151 gen_op_andl_T0_ffff();
5152 gen_op_jmp_T0();
5153 gen_eob(s);
5154 break;
5155 case 5: /* ljmp Ev */
5156 gen_op_ld_T1_A0(ot + s->mem_index);
5157 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5158 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5159 do_ljmp:
5160 if (s->pe && !s->vm86) {
5161 if (s->cc_op != CC_OP_DYNAMIC)
5162 gen_op_set_cc_op(s->cc_op);
5163 gen_jmp_im(pc_start - s->cs_base);
5164 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5165 tcg_gen_helper_0_3(helper_ljmp_protected,
5166 cpu_tmp2_i32,
5167 cpu_T[1],
5168 tcg_const_i32(s->pc - pc_start));
5169 } else {
5170 gen_op_movl_seg_T0_vm(R_CS);
5171 gen_op_movl_T0_T1();
5172 gen_op_jmp_T0();
5173 }
5174 gen_eob(s);
5175 break;
5176 case 6: /* push Ev */
5177 gen_push_T0(s);
5178 break;
5179 default:
5180 goto illegal_op;
5181 }
5182 break;
5183
5184 case 0x84: /* test Ev, Gv */
5185 case 0x85:
5186 if ((b & 1) == 0)
5187 ot = OT_BYTE;
5188 else
5189 ot = dflag + OT_WORD;
5190
5191 modrm = ldub_code(s->pc++);
5192 mod = (modrm >> 6) & 3;
5193 rm = (modrm & 7) | REX_B(s);
5194 reg = ((modrm >> 3) & 7) | rex_r;
5195
5196 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5197 gen_op_mov_TN_reg(ot, 1, reg);
5198 gen_op_testl_T0_T1_cc();
5199 s->cc_op = CC_OP_LOGICB + ot;
5200 break;
5201
5202 case 0xa8: /* test eAX, Iv */
5203 case 0xa9:
5204 if ((b & 1) == 0)
5205 ot = OT_BYTE;
5206 else
5207 ot = dflag + OT_WORD;
5208 val = insn_get(s, ot);
5209
5210 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5211 gen_op_movl_T1_im(val);
5212 gen_op_testl_T0_T1_cc();
5213 s->cc_op = CC_OP_LOGICB + ot;
5214 break;
5215
5216 case 0x98: /* CWDE/CBW */
5217#ifdef TARGET_X86_64
5218 if (dflag == 2) {
5219 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5220 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5221 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5222 } else
5223#endif
5224 if (dflag == 1) {
5225 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5226 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5227 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5228 } else {
5229 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5230 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5231 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5232 }
5233 break;
5234 case 0x99: /* CDQ/CWD */
5235#ifdef TARGET_X86_64
5236 if (dflag == 2) {
5237 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5238 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5239 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5240 } else
5241#endif
5242 if (dflag == 1) {
5243 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5244 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5245 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5246 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5247 } else {
5248 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5249 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5250 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5251 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5252 }
5253 break;
5254 case 0x1af: /* imul Gv, Ev */
5255 case 0x69: /* imul Gv, Ev, I */
5256 case 0x6b:
5257 ot = dflag + OT_WORD;
5258 modrm = ldub_code(s->pc++);
5259 reg = ((modrm >> 3) & 7) | rex_r;
5260 if (b == 0x69)
5261 s->rip_offset = insn_const_size(ot);
5262 else if (b == 0x6b)
5263 s->rip_offset = 1;
5264 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5265 if (b == 0x69) {
5266 val = insn_get(s, ot);
5267 gen_op_movl_T1_im(val);
5268 } else if (b == 0x6b) {
5269 val = (int8_t)insn_get(s, OT_BYTE);
5270 gen_op_movl_T1_im(val);
5271 } else {
5272 gen_op_mov_TN_reg(ot, 1, reg);
5273 }
5274
5275#ifdef TARGET_X86_64
5276 if (ot == OT_QUAD) {
5277 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5278 } else
5279#endif
5280 if (ot == OT_LONG) {
5281#ifdef TARGET_X86_64
5282 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5283 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5284 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5285 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5286 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5287 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5288#else
5289 {
5290 TCGv t0, t1;
5291 t0 = tcg_temp_new(TCG_TYPE_I64);
5292 t1 = tcg_temp_new(TCG_TYPE_I64);
5293 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5294 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5295 tcg_gen_mul_i64(t0, t0, t1);
5296 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5297 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5298 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5299 tcg_gen_shri_i64(t0, t0, 32);
5300 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5301 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5302 }
5303#endif
5304 } else {
5305 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5306 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5307 /* XXX: use 32 bit mul which could be faster */
5308 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5309 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5310 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5311 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5312 }
5313 gen_op_mov_reg_T0(ot, reg);
5314 s->cc_op = CC_OP_MULB + ot;
5315 break;
5316 case 0x1c0:
5317 case 0x1c1: /* xadd Ev, Gv */
5318 if ((b & 1) == 0)
5319 ot = OT_BYTE;
5320 else
5321 ot = dflag + OT_WORD;
5322 modrm = ldub_code(s->pc++);
5323 reg = ((modrm >> 3) & 7) | rex_r;
5324 mod = (modrm >> 6) & 3;
5325 if (mod == 3) {
5326 rm = (modrm & 7) | REX_B(s);
5327 gen_op_mov_TN_reg(ot, 0, reg);
5328 gen_op_mov_TN_reg(ot, 1, rm);
5329 gen_op_addl_T0_T1();
5330 gen_op_mov_reg_T1(ot, reg);
5331 gen_op_mov_reg_T0(ot, rm);
5332 } else {
5333 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5334 gen_op_mov_TN_reg(ot, 0, reg);
5335 gen_op_ld_T1_A0(ot + s->mem_index);
5336 gen_op_addl_T0_T1();
5337 gen_op_st_T0_A0(ot + s->mem_index);
5338 gen_op_mov_reg_T1(ot, reg);
5339 }
5340 gen_op_update2_cc();
5341 s->cc_op = CC_OP_ADDB + ot;
5342 break;
5343 case 0x1b0:
5344 case 0x1b1: /* cmpxchg Ev, Gv */
5345 {
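 /* CMPXCHG: if EAX equals the destination, store the source there;
    otherwise load the old value into EAX. The memory form performs
    the store unconditionally, as documented for real CPUs. */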
5346 int label1, label2;
5347 TCGv t0, t1, t2, a0;
5348
5349 if ((b & 1) == 0)
5350 ot = OT_BYTE;
5351 else
5352 ot = dflag + OT_WORD;
5353 modrm = ldub_code(s->pc++);
5354 reg = ((modrm >> 3) & 7) | rex_r;
5355 mod = (modrm >> 6) & 3;
5356 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5357 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5358 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5359 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5360 gen_op_mov_v_reg(ot, t1, reg);
5361 if (mod == 3) {
5362 rm = (modrm & 7) | REX_B(s);
5363 gen_op_mov_v_reg(ot, t0, rm);
5364 } else {
5365 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5366 tcg_gen_mov_tl(a0, cpu_A0);
5367 gen_op_ld_v(ot + s->mem_index, t0, a0);
5368 rm = 0; /* avoid warning */
5369 }
5370 label1 = gen_new_label();
5371 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5372 tcg_gen_sub_tl(t2, t2, t0);
5373 gen_extu(ot, t2);
5374 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5375 if (mod == 3) {
5376 label2 = gen_new_label();
5377 gen_op_mov_reg_v(ot, R_EAX, t0);
5378 tcg_gen_br(label2);
5379 gen_set_label(label1);
5380 gen_op_mov_reg_v(ot, rm, t1);
5381 gen_set_label(label2);
5382 } else {
5383 tcg_gen_mov_tl(t1, t0);
5384 gen_op_mov_reg_v(ot, R_EAX, t0);
5385 gen_set_label(label1);
5386 /* always store */
5387 gen_op_st_v(ot + s->mem_index, t1, a0);
5388 }
5389 tcg_gen_mov_tl(cpu_cc_src, t0);
5390 tcg_gen_mov_tl(cpu_cc_dst, t2);
5391 s->cc_op = CC_OP_SUBB + ot;
5392 tcg_temp_free(t0);
5393 tcg_temp_free(t1);
5394 tcg_temp_free(t2);
5395 tcg_temp_free(a0);
5396 }
5397 break;
5398 case 0x1c7: /* cmpxchg8b */
5399 modrm = ldub_code(s->pc++);
5400 mod = (modrm >> 6) & 3;
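 /* only the memory form of /1 is CMPXCHG8B/CMPXCHG16B */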
5401 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5402 goto illegal_op;
5403#ifdef TARGET_X86_64
5404 if (dflag == 2) {
5405 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5406 goto illegal_op;
5407 gen_jmp_im(pc_start - s->cs_base);
5408 if (s->cc_op != CC_OP_DYNAMIC)
5409 gen_op_set_cc_op(s->cc_op);
5410 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5411 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5412 } else
5413#endif
5414 {
5415 if (!(s->cpuid_features & CPUID_CX8))
5416 goto illegal_op;
5417 gen_jmp_im(pc_start - s->cs_base);
5418 if (s->cc_op != CC_OP_DYNAMIC)
5419 gen_op_set_cc_op(s->cc_op);
5420 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5421 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5422 }
5423 s->cc_op = CC_OP_EFLAGS;
5424 break;
5425
5426 /**************************/
5427 /* push/pop */
5428 case 0x50 ... 0x57: /* push */
5429 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5430 gen_push_T0(s);
5431 break;
5432 case 0x58 ... 0x5f: /* pop */
5433 if (CODE64(s)) {
5434 ot = dflag ? OT_QUAD : OT_WORD;
5435 } else {
5436 ot = dflag + OT_WORD;
5437 }
5438 gen_pop_T0(s);
5439 /* NOTE: order is important for pop %sp */
5440 gen_pop_update(s);
5441 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5442 break;
5443 case 0x60: /* pusha */
5444 if (CODE64(s))
5445 goto illegal_op;
5446 gen_pusha(s);
5447 break;
5448 case 0x61: /* popa */
5449 if (CODE64(s))
5450 goto illegal_op;
5451 gen_popa(s);
5452 break;
5453 case 0x68: /* push Iv */
5454 case 0x6a:
5455 if (CODE64(s)) {
5456 ot = dflag ? OT_QUAD : OT_WORD;
5457 } else {
5458 ot = dflag + OT_WORD;
5459 }
5460 if (b == 0x68)
5461 val = insn_get(s, ot);
5462 else
5463 val = (int8_t)insn_get(s, OT_BYTE);
5464 gen_op_movl_T0_im(val);
5465 gen_push_T0(s);
5466 break;
5467 case 0x8f: /* pop Ev */
5468 if (CODE64(s)) {
5469 ot = dflag ? OT_QUAD : OT_WORD;
5470 } else {
5471 ot = dflag + OT_WORD;
5472 }
5473 modrm = ldub_code(s->pc++);
5474 mod = (modrm >> 6) & 3;
5475 gen_pop_T0(s);
5476 if (mod == 3) {
5477 /* NOTE: order is important for pop %sp */
5478 gen_pop_update(s);
5479 rm = (modrm & 7) | REX_B(s);
5480 gen_op_mov_reg_T0(ot, rm);
5481 } else {
5482 /* NOTE: order is important too for MMU exceptions */
5483 s->popl_esp_hack = 1 << ot;
5484 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5485 s->popl_esp_hack = 0;
5486 gen_pop_update(s);
5487 }
5488 break;
5489 case 0xc8: /* enter */
5490 {
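 /* enter imm16, imm8: 16-bit frame size followed by an 8-bit
    nesting level */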
5491 int level;
5492 val = lduw_code(s->pc);
5493 s->pc += 2;
5494 level = ldub_code(s->pc++);
5495 gen_enter(s, val, level);
5496 }
5497 break;
5498 case 0xc9: /* leave */
5499 /* XXX: exception not precise (ESP is updated before potential exception) */
5500 if (CODE64(s)) {
5501 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5502 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5503 } else if (s->ss32) {
5504 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5505 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5506 } else {
5507 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5508 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5509 }
5510 gen_pop_T0(s);
5511 if (CODE64(s)) {
5512 ot = dflag ? OT_QUAD : OT_WORD;
5513 } else {
5514 ot = dflag + OT_WORD;
5515 }
5516 gen_op_mov_reg_T0(ot, R_EBP);
5517 gen_pop_update(s);
5518 break;
5519 case 0x06: /* push es */
5520 case 0x0e: /* push cs */
5521 case 0x16: /* push ss */
5522 case 0x1e: /* push ds */
5523 if (CODE64(s))
5524 goto illegal_op;
5525 gen_op_movl_T0_seg(b >> 3);
5526 gen_push_T0(s);
5527 break;
5528 case 0x1a0: /* push fs */
5529 case 0x1a8: /* push gs */
5530 gen_op_movl_T0_seg((b >> 3) & 7);
5531 gen_push_T0(s);
5532 break;
5533 case 0x07: /* pop es */
5534 case 0x17: /* pop ss */
5535 case 0x1f: /* pop ds */
5536 if (CODE64(s))
5537 goto illegal_op;
5538 reg = b >> 3;
5539 gen_pop_T0(s);
5540 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5541 gen_pop_update(s);
5542 if (reg == R_SS) {
5543 /* if reg == SS, inhibit interrupts/trace. */
5544 /* If several instructions disable interrupts, only the
5545 _first_ does it */
5546 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5547 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5548 s->tf = 0;
5549 }
5550 if (s->is_jmp) {
5551 gen_jmp_im(s->pc - s->cs_base);
5552 gen_eob(s);
5553 }
5554 break;
5555 case 0x1a1: /* pop fs */
5556 case 0x1a9: /* pop gs */
5557 gen_pop_T0(s);
5558 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5559 gen_pop_update(s);
5560 if (s->is_jmp) {
5561 gen_jmp_im(s->pc - s->cs_base);
5562 gen_eob(s);
5563 }
5564 break;
5565
5566 /**************************/
5567 /* mov */
5568 case 0x88:
5569 case 0x89: /* mov Gv, Ev */
5570 if ((b & 1) == 0)
5571 ot = OT_BYTE;
5572 else
5573 ot = dflag + OT_WORD;
5574 modrm = ldub_code(s->pc++);
5575 reg = ((modrm >> 3) & 7) | rex_r;
5576
5577 /* generate a generic store */
5578 gen_ldst_modrm(s, modrm, ot, reg, 1);
5579 break;
5580 case 0xc6:
5581 case 0xc7: /* mov Ev, Iv */
5582 if ((b & 1) == 0)
5583 ot = OT_BYTE;
5584 else
5585 ot = dflag + OT_WORD;
5586 modrm = ldub_code(s->pc++);
5587 mod = (modrm >> 6) & 3;
5588 if (mod != 3) {
5589 s->rip_offset = insn_const_size(ot);
5590 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5591 }
5592 val = insn_get(s, ot);
5593 gen_op_movl_T0_im(val);
5594 if (mod != 3)
5595 gen_op_st_T0_A0(ot + s->mem_index);
5596 else
5597 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5598 break;
5599 case 0x8a:
5600 case 0x8b: /* mov Ev, Gv */
5601#ifdef VBOX /* dtrace hot fix */
5602 if (prefixes & PREFIX_LOCK)
5603 goto illegal_op;
5604#endif
5605 if ((b & 1) == 0)
5606 ot = OT_BYTE;
5607 else
5608 ot = OT_WORD + dflag;
5609 modrm = ldub_code(s->pc++);
5610 reg = ((modrm >> 3) & 7) | rex_r;
5611
5612 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5613 gen_op_mov_reg_T0(ot, reg);
5614 break;
5615 case 0x8e: /* mov seg, Gv */
5616 modrm = ldub_code(s->pc++);
5617 reg = (modrm >> 3) & 7;
5618 if (reg >= 6 || reg == R_CS)
5619 goto illegal_op;
5620 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5621 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5622 if (reg == R_SS) {
5623 /* if reg == SS, inhibit interrupts/trace */
5624 /* If several instructions disable interrupts, only the
5625 _first_ does it */
5626 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5627 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5628 s->tf = 0;
5629 }
5630 if (s->is_jmp) {
5631 gen_jmp_im(s->pc - s->cs_base);
5632 gen_eob(s);
5633 }
5634 break;
5635 case 0x8c: /* mov Gv, seg */
5636 modrm = ldub_code(s->pc++);
5637 reg = (modrm >> 3) & 7;
5638 mod = (modrm >> 6) & 3;
5639 if (reg >= 6)
5640 goto illegal_op;
5641 gen_op_movl_T0_seg(reg);
5642 if (mod == 3)
5643 ot = OT_WORD + dflag;
5644 else
5645 ot = OT_WORD;
5646 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5647 break;
5648
5649 case 0x1b6: /* movzbS Gv, Eb */
5650 case 0x1b7: /* movzwS Gv, Ew */
5651 case 0x1be: /* movsbS Gv, Eb */
5652 case 0x1bf: /* movswS Gv, Ew */
5653 {
5654 int d_ot;
5655 /* d_ot is the size of destination */
5656 d_ot = dflag + OT_WORD;
5657 /* ot is the size of source */
5658 ot = (b & 1) + OT_BYTE;
5659 modrm = ldub_code(s->pc++);
5660 reg = ((modrm >> 3) & 7) | rex_r;
5661 mod = (modrm >> 6) & 3;
5662 rm = (modrm & 7) | REX_B(s);
5663
5664 if (mod == 3) {
5665 gen_op_mov_TN_reg(ot, 0, rm);
5666 switch(ot | (b & 8)) {
5667 case OT_BYTE:
5668 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5669 break;
5670 case OT_BYTE | 8:
5671 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5672 break;
5673 case OT_WORD:
5674 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5675 break;
5676 default:
5677 case OT_WORD | 8:
5678 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5679 break;
5680 }
5681 gen_op_mov_reg_T0(d_ot, reg);
5682 } else {
5683 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5684 if (b & 8) {
5685 gen_op_lds_T0_A0(ot + s->mem_index);
5686 } else {
5687 gen_op_ldu_T0_A0(ot + s->mem_index);
5688 }
5689 gen_op_mov_reg_T0(d_ot, reg);
5690 }
5691 }
5692 break;
5693
5694 case 0x8d: /* lea */
5695 ot = dflag + OT_WORD;
5696 modrm = ldub_code(s->pc++);
5697 mod = (modrm >> 6) & 3;
5698 if (mod == 3)
5699 goto illegal_op;
5700 reg = ((modrm >> 3) & 7) | rex_r;
5701 /* we must ensure that no segment is added */
5702 s->override = -1;
5703 val = s->addseg;
5704 s->addseg = 0;
5705 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5706 s->addseg = val;
5707 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5708 break;
5709
5710 case 0xa0: /* mov EAX, Ov */
5711 case 0xa1:
5712 case 0xa2: /* mov Ov, EAX */
5713 case 0xa3:
5714 {
5715 target_ulong offset_addr;
5716
5717 if ((b & 1) == 0)
5718 ot = OT_BYTE;
5719 else
5720 ot = dflag + OT_WORD;
5721#ifdef TARGET_X86_64
5722 if (s->aflag == 2) {
5723 offset_addr = ldq_code(s->pc);
5724 s->pc += 8;
5725 gen_op_movq_A0_im(offset_addr);
5726 } else
5727#endif
5728 {
5729 if (s->aflag) {
5730 offset_addr = insn_get(s, OT_LONG);
5731 } else {
5732 offset_addr = insn_get(s, OT_WORD);
5733 }
5734 gen_op_movl_A0_im(offset_addr);
5735 }
5736 gen_add_A0_ds_seg(s);
5737 if ((b & 2) == 0) {
5738 gen_op_ld_T0_A0(ot + s->mem_index);
5739 gen_op_mov_reg_T0(ot, R_EAX);
5740 } else {
5741 gen_op_mov_TN_reg(ot, 0, R_EAX);
5742 gen_op_st_T0_A0(ot + s->mem_index);
5743 }
5744 }
5745 break;
5746 case 0xd7: /* xlat */
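 /* AL = [seg_base + (E/R)BX + unsigned AL] */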
5747#ifdef TARGET_X86_64
5748 if (s->aflag == 2) {
5749 gen_op_movq_A0_reg(R_EBX);
5750 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5751 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5752 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5753 } else
5754#endif
5755 {
5756 gen_op_movl_A0_reg(R_EBX);
5757 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5758 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5759 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5760 if (s->aflag == 0)
5761 gen_op_andl_A0_ffff();
5762 else
5763 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5764 }
5765 gen_add_A0_ds_seg(s);
5766 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5767 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5768 break;
5769 case 0xb0 ... 0xb7: /* mov R, Ib */
5770 val = insn_get(s, OT_BYTE);
5771 gen_op_movl_T0_im(val);
5772 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5773 break;
5774 case 0xb8 ... 0xbf: /* mov R, Iv */
5775#ifdef TARGET_X86_64
5776 if (dflag == 2) {
5777 uint64_t tmp;
5778 /* 64 bit case */
5779 tmp = ldq_code(s->pc);
5780 s->pc += 8;
5781 reg = (b & 7) | REX_B(s);
5782 gen_movtl_T0_im(tmp);
5783 gen_op_mov_reg_T0(OT_QUAD, reg);
5784 } else
5785#endif
5786 {
5787 ot = dflag ? OT_LONG : OT_WORD;
5788 val = insn_get(s, ot);
5789 reg = (b & 7) | REX_B(s);
5790 gen_op_movl_T0_im(val);
5791 gen_op_mov_reg_T0(ot, reg);
5792 }
5793 break;
5794
5795 case 0x91 ... 0x97: /* xchg R, EAX */
5796 ot = dflag + OT_WORD;
5797 reg = (b & 7) | REX_B(s);
5798 rm = R_EAX;
5799 goto do_xchg_reg;
5800 case 0x86:
5801 case 0x87: /* xchg Ev, Gv */
5802 if ((b & 1) == 0)
5803 ot = OT_BYTE;
5804 else
5805 ot = dflag + OT_WORD;
5806 modrm = ldub_code(s->pc++);
5807 reg = ((modrm >> 3) & 7) | rex_r;
5808 mod = (modrm >> 6) & 3;
5809 if (mod == 3) {
5810 rm = (modrm & 7) | REX_B(s);
5811 do_xchg_reg:
5812 gen_op_mov_TN_reg(ot, 0, reg);
5813 gen_op_mov_TN_reg(ot, 1, rm);
5814 gen_op_mov_reg_T0(ot, rm);
5815 gen_op_mov_reg_T1(ot, reg);
5816 } else {
5817 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5818 gen_op_mov_TN_reg(ot, 0, reg);
5819 /* for xchg, lock is implicit */
5820 if (!(prefixes & PREFIX_LOCK))
5821 tcg_gen_helper_0_0(helper_lock);
5822 gen_op_ld_T1_A0(ot + s->mem_index);
5823 gen_op_st_T0_A0(ot + s->mem_index);
5824 if (!(prefixes & PREFIX_LOCK))
5825 tcg_gen_helper_0_0(helper_unlock);
5826 gen_op_mov_reg_T1(ot, reg);
5827 }
5828 break;
5829 case 0xc4: /* les Gv */
5830 if (CODE64(s))
5831 goto illegal_op;
5832 op = R_ES;
5833 goto do_lxx;
5834 case 0xc5: /* lds Gv */
5835 if (CODE64(s))
5836 goto illegal_op;
5837 op = R_DS;
5838 goto do_lxx;
5839 case 0x1b2: /* lss Gv */
5840 op = R_SS;
5841 goto do_lxx;
5842 case 0x1b4: /* lfs Gv */
5843 op = R_FS;
5844 goto do_lxx;
5845 case 0x1b5: /* lgs Gv */
5846 op = R_GS;
5847 do_lxx:
5848 ot = dflag ? OT_LONG : OT_WORD;
5849 modrm = ldub_code(s->pc++);
5850 reg = ((modrm >> 3) & 7) | rex_r;
5851 mod = (modrm >> 6) & 3;
5852 if (mod == 3)
5853 goto illegal_op;
5854 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5855 gen_op_ld_T1_A0(ot + s->mem_index);
5856 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5857 /* load the segment first to handle exceptions properly */
5858 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5859 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5860 /* then put the data */
5861 gen_op_mov_reg_T1(ot, reg);
5862 if (s->is_jmp) {
5863 gen_jmp_im(s->pc - s->cs_base);
5864 gen_eob(s);
5865 }
5866 break;
5867
5868 /************************/
5869 /* shifts */
5870 case 0xc0:
5871 case 0xc1:
5872 /* shift Ev,Ib */
5873 shift = 2;
5874 grp2:
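 /* shift selects the count source: 0 = CL, 1 = the constant 1,
    2 = an imm8 following the ModRM byte */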
5875 {
5876 if ((b & 1) == 0)
5877 ot = OT_BYTE;
5878 else
5879 ot = dflag + OT_WORD;
5880
5881 modrm = ldub_code(s->pc++);
5882 mod = (modrm >> 6) & 3;
5883 op = (modrm >> 3) & 7;
5884
5885 if (mod != 3) {
5886 if (shift == 2) {
5887 s->rip_offset = 1;
5888 }
5889 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5890 opreg = OR_TMP0;
5891 } else {
5892 opreg = (modrm & 7) | REX_B(s);
5893 }
5894
5895 /* simpler op */
5896 if (shift == 0) {
5897 gen_shift(s, op, ot, opreg, OR_ECX);
5898 } else {
5899 if (shift == 2) {
5900 shift = ldub_code(s->pc++);
5901 }
5902 gen_shifti(s, op, ot, opreg, shift);
5903 }
5904 }
5905 break;
5906 case 0xd0:
5907 case 0xd1:
5908 /* shift Ev,1 */
5909 shift = 1;
5910 goto grp2;
5911 case 0xd2:
5912 case 0xd3:
5913 /* shift Ev,cl */
5914 shift = 0;
5915 goto grp2;
5916
5917 case 0x1a4: /* shld imm */
5918 op = 0;
5919 shift = 1;
5920 goto do_shiftd;
5921 case 0x1a5: /* shld cl */
5922 op = 0;
5923 shift = 0;
5924 goto do_shiftd;
5925 case 0x1ac: /* shrd imm */
5926 op = 1;
5927 shift = 1;
5928 goto do_shiftd;
5929 case 0x1ad: /* shrd cl */
5930 op = 1;
5931 shift = 0;
5932 do_shiftd:
5933 ot = dflag + OT_WORD;
5934 modrm = ldub_code(s->pc++);
5935 mod = (modrm >> 6) & 3;
5936 rm = (modrm & 7) | REX_B(s);
5937 reg = ((modrm >> 3) & 7) | rex_r;
5938 if (mod != 3) {
5939 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5940 opreg = OR_TMP0;
5941 } else {
5942 opreg = rm;
5943 }
5944 gen_op_mov_TN_reg(ot, 1, reg);
5945
5946 if (shift) {
5947 val = ldub_code(s->pc++);
5948 tcg_gen_movi_tl(cpu_T3, val);
5949 } else {
5950 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5951 }
5952 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5953 break;
5954
5955 /************************/
5956 /* floats */
5957 case 0xd8 ... 0xdf:
5958 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5959 /* if CR0.EM or CR0.TS is set, generate an FPU exception */
5960 /* XXX: what to do if illegal op? */
5961 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5962 break;
5963 }
5964 modrm = ldub_code(s->pc++);
5965 mod = (modrm >> 6) & 3;
5966 rm = modrm & 7;
5967 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
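 /* pack the low 3 opcode bits (0xd8..0xdf) with the ModRM /r field
    into a 6-bit index, so every x87 operation gets a unique slot in
    the switches below */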
5968 if (mod != 3) {
5969 /* memory op */
5970 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5971 switch(op) {
5972 case 0x00 ... 0x07: /* fxxxs */
5973 case 0x10 ... 0x17: /* fixxxl */
5974 case 0x20 ... 0x27: /* fxxxl */
5975 case 0x30 ... 0x37: /* fixxx */
5976 {
5977 int op1;
5978 op1 = op & 7;
5979
5980 switch(op >> 4) {
5981 case 0:
5982 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5983 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5984 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5985 break;
5986 case 1:
5987 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5988 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5989 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5990 break;
5991 case 2:
5992 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5993 (s->mem_index >> 2) - 1);
5994 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5995 break;
5996 case 3:
5997 default:
5998 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5999 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6000 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6001 break;
6002 }
6003
6004 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6005 if (op1 == 3) {
6006 /* fcomp needs pop */
6007 tcg_gen_helper_0_0(helper_fpop);
6008 }
6009 }
6010 break;
6011 case 0x08: /* flds */
6012 case 0x0a: /* fsts */
6013 case 0x0b: /* fstps */
6014 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6015 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6016 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6017 switch(op & 7) {
6018 case 0:
6019 switch(op >> 4) {
6020 case 0:
6021 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6022 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6023 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6024 break;
6025 case 1:
6026 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6027 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6028 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6029 break;
6030 case 2:
6031 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6032 (s->mem_index >> 2) - 1);
6033 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6034 break;
6035 case 3:
6036 default:
6037 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6038 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6039 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6040 break;
6041 }
6042 break;
6043 case 1:
6044 /* XXX: the corresponding CPUID bit must be tested! */
6045 switch(op >> 4) {
6046 case 1:
6047 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6048 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6049 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6050 break;
6051 case 2:
6052 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6053 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6054 (s->mem_index >> 2) - 1);
6055 break;
6056 case 3:
6057 default:
6058 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6059 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6060 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6061 break;
6062 }
6063 tcg_gen_helper_0_0(helper_fpop);
6064 break;
6065 default:
6066 switch(op >> 4) {
6067 case 0:
6068 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6069 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6070 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6071 break;
6072 case 1:
6073 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6074 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6075 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6076 break;
6077 case 2:
6078 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6079 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6080 (s->mem_index >> 2) - 1);
6081 break;
6082 case 3:
6083 default:
6084 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6085 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6086 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6087 break;
6088 }
6089 if ((op & 7) == 3)
6090 tcg_gen_helper_0_0(helper_fpop);
6091 break;
6092 }
6093 break;
6094 case 0x0c: /* fldenv mem */
6095 if (s->cc_op != CC_OP_DYNAMIC)
6096 gen_op_set_cc_op(s->cc_op);
6097 gen_jmp_im(pc_start - s->cs_base);
6098 tcg_gen_helper_0_2(helper_fldenv,
6099 cpu_A0, tcg_const_i32(s->dflag));
6100 break;
6101 case 0x0d: /* fldcw mem */
6102 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6103 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6104 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6105 break;
6106 case 0x0e: /* fnstenv mem */
6107 if (s->cc_op != CC_OP_DYNAMIC)
6108 gen_op_set_cc_op(s->cc_op);
6109 gen_jmp_im(pc_start - s->cs_base);
6110 tcg_gen_helper_0_2(helper_fstenv,
6111 cpu_A0, tcg_const_i32(s->dflag));
6112 break;
6113 case 0x0f: /* fnstcw mem */
6114 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6115 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6116 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6117 break;
6118 case 0x1d: /* fldt mem */
6119 if (s->cc_op != CC_OP_DYNAMIC)
6120 gen_op_set_cc_op(s->cc_op);
6121 gen_jmp_im(pc_start - s->cs_base);
6122 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6123 break;
6124 case 0x1f: /* fstpt mem */
6125 if (s->cc_op != CC_OP_DYNAMIC)
6126 gen_op_set_cc_op(s->cc_op);
6127 gen_jmp_im(pc_start - s->cs_base);
6128 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6129 tcg_gen_helper_0_0(helper_fpop);
6130 break;
6131 case 0x2c: /* frstor mem */
6132 if (s->cc_op != CC_OP_DYNAMIC)
6133 gen_op_set_cc_op(s->cc_op);
6134 gen_jmp_im(pc_start - s->cs_base);
6135 tcg_gen_helper_0_2(helper_frstor,
6136 cpu_A0, tcg_const_i32(s->dflag));
6137 break;
6138 case 0x2e: /* fnsave mem */
6139 if (s->cc_op != CC_OP_DYNAMIC)
6140 gen_op_set_cc_op(s->cc_op);
6141 gen_jmp_im(pc_start - s->cs_base);
6142 tcg_gen_helper_0_2(helper_fsave,
6143 cpu_A0, tcg_const_i32(s->dflag));
6144 break;
6145 case 0x2f: /* fnstsw mem */
6146 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6147 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6148 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6149 break;
6150 case 0x3c: /* fbld */
6151 if (s->cc_op != CC_OP_DYNAMIC)
6152 gen_op_set_cc_op(s->cc_op);
6153 gen_jmp_im(pc_start - s->cs_base);
6154 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6155 break;
6156 case 0x3e: /* fbstp */
6157 if (s->cc_op != CC_OP_DYNAMIC)
6158 gen_op_set_cc_op(s->cc_op);
6159 gen_jmp_im(pc_start - s->cs_base);
6160 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6161 tcg_gen_helper_0_0(helper_fpop);
6162 break;
6163 case 0x3d: /* fildll */
6164 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6165 (s->mem_index >> 2) - 1);
6166 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6167 break;
6168 case 0x3f: /* fistpll */
6169 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6170 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6171 (s->mem_index >> 2) - 1);
6172 tcg_gen_helper_0_0(helper_fpop);
6173 break;
6174 default:
6175 goto illegal_op;
6176 }
6177 } else {
6178 /* register float ops */
6179 opreg = rm;
6180
6181 switch(op) {
6182 case 0x08: /* fld sti */
6183 tcg_gen_helper_0_0(helper_fpush);
6184 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6185 break;
6186 case 0x09: /* fxchg sti */
6187 case 0x29: /* fxchg4 sti, undocumented op */
6188 case 0x39: /* fxchg7 sti, undocumented op */
6189 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6190 break;
6191 case 0x0a: /* grp d9/2 */
6192 switch(rm) {
6193 case 0: /* fnop */
6194 /* check exceptions (FreeBSD FPU probe) */
6195 if (s->cc_op != CC_OP_DYNAMIC)
6196 gen_op_set_cc_op(s->cc_op);
6197 gen_jmp_im(pc_start - s->cs_base);
6198 tcg_gen_helper_0_0(helper_fwait);
6199 break;
6200 default:
6201 goto illegal_op;
6202 }
6203 break;
6204 case 0x0c: /* grp d9/4 */
6205 switch(rm) {
6206 case 0: /* fchs */
6207 tcg_gen_helper_0_0(helper_fchs_ST0);
6208 break;
6209 case 1: /* fabs */
6210 tcg_gen_helper_0_0(helper_fabs_ST0);
6211 break;
6212 case 4: /* ftst */
6213 tcg_gen_helper_0_0(helper_fldz_FT0);
6214 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6215 break;
6216 case 5: /* fxam */
6217 tcg_gen_helper_0_0(helper_fxam_ST0);
6218 break;
6219 default:
6220 goto illegal_op;
6221 }
6222 break;
6223 case 0x0d: /* grp d9/5 */
6224 {
6225 switch(rm) {
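                             /* rm selects the constant that is pushed:
                                0=fld1, 1=fldl2t, 2=fldl2e, 3=fldpi,
                                4=fldlg2, 5=fldln2, 6=fldz; rm=7 is illegal */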
6226 case 0:
6227 tcg_gen_helper_0_0(helper_fpush);
6228 tcg_gen_helper_0_0(helper_fld1_ST0);
6229 break;
6230 case 1:
6231 tcg_gen_helper_0_0(helper_fpush);
6232 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6233 break;
6234 case 2:
6235 tcg_gen_helper_0_0(helper_fpush);
6236 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6237 break;
6238 case 3:
6239 tcg_gen_helper_0_0(helper_fpush);
6240 tcg_gen_helper_0_0(helper_fldpi_ST0);
6241 break;
6242 case 4:
6243 tcg_gen_helper_0_0(helper_fpush);
6244 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6245 break;
6246 case 5:
6247 tcg_gen_helper_0_0(helper_fpush);
6248 tcg_gen_helper_0_0(helper_fldln2_ST0);
6249 break;
6250 case 6:
6251 tcg_gen_helper_0_0(helper_fpush);
6252 tcg_gen_helper_0_0(helper_fldz_ST0);
6253 break;
6254 default:
6255 goto illegal_op;
6256 }
6257 }
6258 break;
6259 case 0x0e: /* grp d9/6 */
6260 switch(rm) {
6261 case 0: /* f2xm1 */
6262 tcg_gen_helper_0_0(helper_f2xm1);
6263 break;
6264 case 1: /* fyl2x */
6265 tcg_gen_helper_0_0(helper_fyl2x);
6266 break;
6267 case 2: /* fptan */
6268 tcg_gen_helper_0_0(helper_fptan);
6269 break;
6270 case 3: /* fpatan */
6271 tcg_gen_helper_0_0(helper_fpatan);
6272 break;
6273 case 4: /* fxtract */
6274 tcg_gen_helper_0_0(helper_fxtract);
6275 break;
6276 case 5: /* fprem1 */
6277 tcg_gen_helper_0_0(helper_fprem1);
6278 break;
6279 case 6: /* fdecstp */
6280 tcg_gen_helper_0_0(helper_fdecstp);
6281 break;
6282 default:
6283 case 7: /* fincstp */
6284 tcg_gen_helper_0_0(helper_fincstp);
6285 break;
6286 }
6287 break;
6288 case 0x0f: /* grp d9/7 */
6289 switch(rm) {
6290 case 0: /* fprem */
6291 tcg_gen_helper_0_0(helper_fprem);
6292 break;
6293 case 1: /* fyl2xp1 */
6294 tcg_gen_helper_0_0(helper_fyl2xp1);
6295 break;
6296 case 2: /* fsqrt */
6297 tcg_gen_helper_0_0(helper_fsqrt);
6298 break;
6299 case 3: /* fsincos */
6300 tcg_gen_helper_0_0(helper_fsincos);
6301 break;
6302 case 5: /* fscale */
6303 tcg_gen_helper_0_0(helper_fscale);
6304 break;
6305 case 4: /* frndint */
6306 tcg_gen_helper_0_0(helper_frndint);
6307 break;
6308 case 6: /* fsin */
6309 tcg_gen_helper_0_0(helper_fsin);
6310 break;
6311 default:
6312 case 7: /* fcos */
6313 tcg_gen_helper_0_0(helper_fcos);
6314 break;
6315 }
6316 break;
6317 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6318 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6319 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6320 {
6321 int op1;
6322
6323 op1 = op & 7;
6324 if (op >= 0x20) {
6325 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6326 if (op >= 0x30)
6327 tcg_gen_helper_0_0(helper_fpop);
6328 } else {
6329 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6330 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6331 }
6332 }
6333 break;
6334 case 0x02: /* fcom */
6335 case 0x22: /* fcom2, undocumented op */
6336 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6337 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6338 break;
6339 case 0x03: /* fcomp */
6340 case 0x23: /* fcomp3, undocumented op */
6341 case 0x32: /* fcomp5, undocumented op */
6342 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6343 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6344 tcg_gen_helper_0_0(helper_fpop);
6345 break;
6346 case 0x15: /* da/5 */
6347 switch(rm) {
6348 case 1: /* fucompp */
6349 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6350 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6351 tcg_gen_helper_0_0(helper_fpop);
6352 tcg_gen_helper_0_0(helper_fpop);
6353 break;
6354 default:
6355 goto illegal_op;
6356 }
6357 break;
6358 case 0x1c:
6359 switch(rm) {
6360                     case 0: /* feni (287 only, treated as a nop here) */
6361                         break;
6362                     case 1: /* fdisi (287 only, treated as a nop here) */
6363                         break;
6364 case 2: /* fclex */
6365 tcg_gen_helper_0_0(helper_fclex);
6366 break;
6367 case 3: /* fninit */
6368 tcg_gen_helper_0_0(helper_fninit);
6369 break;
6370                     case 4: /* fsetpm (287 only, treated as a nop here) */
6371 break;
6372 default:
6373 goto illegal_op;
6374 }
6375 break;
6376 case 0x1d: /* fucomi */
6377 if (s->cc_op != CC_OP_DYNAMIC)
6378 gen_op_set_cc_op(s->cc_op);
6379 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6380 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6381 s->cc_op = CC_OP_EFLAGS;
6382 break;
6383 case 0x1e: /* fcomi */
6384 if (s->cc_op != CC_OP_DYNAMIC)
6385 gen_op_set_cc_op(s->cc_op);
6386 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6387 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6388 s->cc_op = CC_OP_EFLAGS;
6389 break;
6390 case 0x28: /* ffree sti */
6391 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6392 break;
6393 case 0x2a: /* fst sti */
6394 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6395 break;
6396 case 0x2b: /* fstp sti */
6397 case 0x0b: /* fstp1 sti, undocumented op */
6398 case 0x3a: /* fstp8 sti, undocumented op */
6399 case 0x3b: /* fstp9 sti, undocumented op */
6400 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6401 tcg_gen_helper_0_0(helper_fpop);
6402 break;
6403 case 0x2c: /* fucom st(i) */
6404 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6405 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6406 break;
6407 case 0x2d: /* fucomp st(i) */
6408 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6409 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6410 tcg_gen_helper_0_0(helper_fpop);
6411 break;
6412 case 0x33: /* de/3 */
6413 switch(rm) {
6414 case 1: /* fcompp */
6415 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6416 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6417 tcg_gen_helper_0_0(helper_fpop);
6418 tcg_gen_helper_0_0(helper_fpop);
6419 break;
6420 default:
6421 goto illegal_op;
6422 }
6423 break;
6424 case 0x38: /* ffreep sti, undocumented op */
6425 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6426 tcg_gen_helper_0_0(helper_fpop);
6427 break;
6428 case 0x3c: /* df/4 */
6429 switch(rm) {
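                     /* df/4 with rm=0 is fnstsw %ax: the FPU status word is
                        zero-extended and stored in AX */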
6430 case 0:
6431 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6432 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6433 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6434 break;
6435 default:
6436 goto illegal_op;
6437 }
6438 break;
6439 case 0x3d: /* fucomip */
6440 if (s->cc_op != CC_OP_DYNAMIC)
6441 gen_op_set_cc_op(s->cc_op);
6442 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6443 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6444 tcg_gen_helper_0_0(helper_fpop);
6445 s->cc_op = CC_OP_EFLAGS;
6446 break;
6447 case 0x3e: /* fcomip */
6448 if (s->cc_op != CC_OP_DYNAMIC)
6449 gen_op_set_cc_op(s->cc_op);
6450 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6451 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6452 tcg_gen_helper_0_0(helper_fpop);
6453 s->cc_op = CC_OP_EFLAGS;
6454 break;
6455 case 0x10 ... 0x13: /* fcmovxx */
6456 case 0x18 ... 0x1b:
6457 {
6458 int op1, l1;
6459 static const uint8_t fcmov_cc[8] = {
6460 (JCC_B << 1),
6461 (JCC_Z << 1),
6462 (JCC_BE << 1),
6463 (JCC_P << 1),
6464 };
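                     /* the low two opcode bits pick the base condition
                        (below/zero/below-or-equal/parity); bit 3 of op
                        separates the fcmovcc group (0x10) from the fcmovncc
                        group (0x18), and the XOR below folds it into the
                        negate bit of the jcc encoding, so the branch skips
                        the fmov whenever the condition fails */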
6465 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6466 l1 = gen_new_label();
6467 gen_jcc1(s, s->cc_op, op1, l1);
6468 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6469 gen_set_label(l1);
6470 }
6471 break;
6472 default:
6473 goto illegal_op;
6474 }
6475 }
6476 break;
6477 /************************/
6478 /* string ops */
6479
6480 case 0xa4: /* movsS */
6481 case 0xa5:
6482 if ((b & 1) == 0)
6483 ot = OT_BYTE;
6484 else
6485 ot = dflag + OT_WORD;
6486
6487 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6488 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6489 } else {
6490 gen_movs(s, ot);
6491 }
6492 break;
6493
6494 case 0xaa: /* stosS */
6495 case 0xab:
6496 if ((b & 1) == 0)
6497 ot = OT_BYTE;
6498 else
6499 ot = dflag + OT_WORD;
6500
6501 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6502 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6503 } else {
6504 gen_stos(s, ot);
6505 }
6506 break;
6507 case 0xac: /* lodsS */
6508 case 0xad:
6509 if ((b & 1) == 0)
6510 ot = OT_BYTE;
6511 else
6512 ot = dflag + OT_WORD;
6513 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6514 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6515 } else {
6516 gen_lods(s, ot);
6517 }
6518 break;
6519 case 0xae: /* scasS */
6520 case 0xaf:
6521 if ((b & 1) == 0)
6522 ot = OT_BYTE;
6523 else
6524 ot = dflag + OT_WORD;
6525 if (prefixes & PREFIX_REPNZ) {
6526 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6527 } else if (prefixes & PREFIX_REPZ) {
6528 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6529 } else {
6530 gen_scas(s, ot);
6531 s->cc_op = CC_OP_SUBB + ot;
6532 }
6533 break;
6534
6535 case 0xa6: /* cmpsS */
6536 case 0xa7:
6537 if ((b & 1) == 0)
6538 ot = OT_BYTE;
6539 else
6540 ot = dflag + OT_WORD;
6541 if (prefixes & PREFIX_REPNZ) {
6542 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6543 } else if (prefixes & PREFIX_REPZ) {
6544 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6545 } else {
6546 gen_cmps(s, ot);
6547 s->cc_op = CC_OP_SUBB + ot;
6548 }
6549 break;
6550 case 0x6c: /* insS */
6551 case 0x6d:
6552 if ((b & 1) == 0)
6553 ot = OT_BYTE;
6554 else
6555 ot = dflag ? OT_LONG : OT_WORD;
6556 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6557 gen_op_andl_T0_ffff();
6558 gen_check_io(s, ot, pc_start - s->cs_base,
6559 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6560 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6561 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6562 } else {
6563 gen_ins(s, ot);
6564 if (use_icount) {
6565 gen_jmp(s, s->pc - s->cs_base);
6566 }
6567 }
6568 break;
6569 case 0x6e: /* outsS */
6570 case 0x6f:
6571 if ((b & 1) == 0)
6572 ot = OT_BYTE;
6573 else
6574 ot = dflag ? OT_LONG : OT_WORD;
6575 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6576 gen_op_andl_T0_ffff();
6577 gen_check_io(s, ot, pc_start - s->cs_base,
6578 svm_is_rep(prefixes) | 4);
6579 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6580 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6581 } else {
6582 gen_outs(s, ot);
6583 if (use_icount) {
6584 gen_jmp(s, s->pc - s->cs_base);
6585 }
6586 }
6587 break;
6588
6589 /************************/
6590 /* port I/O */
6591
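     /* in icount mode each in/out below is bracketed with
        gen_io_start()/gen_io_end(), and the TB is ended right after the
        access so pending timer events can be serviced */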
6592 case 0xe4:
6593 case 0xe5:
6594 if ((b & 1) == 0)
6595 ot = OT_BYTE;
6596 else
6597 ot = dflag ? OT_LONG : OT_WORD;
6598 val = ldub_code(s->pc++);
6599 gen_op_movl_T0_im(val);
6600 gen_check_io(s, ot, pc_start - s->cs_base,
6601 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6602 if (use_icount)
6603 gen_io_start();
6604 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6605 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6606 gen_op_mov_reg_T1(ot, R_EAX);
6607 if (use_icount) {
6608 gen_io_end();
6609 gen_jmp(s, s->pc - s->cs_base);
6610 }
6611 break;
6612 case 0xe6:
6613 case 0xe7:
6614 if ((b & 1) == 0)
6615 ot = OT_BYTE;
6616 else
6617 ot = dflag ? OT_LONG : OT_WORD;
6618 val = ldub_code(s->pc++);
6619 gen_op_movl_T0_im(val);
6620 gen_check_io(s, ot, pc_start - s->cs_base,
6621 svm_is_rep(prefixes));
6622#ifdef VBOX /* bird: Linux writes to this port to delay I/O. */
6623 if (val == 0x80)
6624 break;
6625#endif /* VBOX */
6626 gen_op_mov_TN_reg(ot, 1, R_EAX);
6627
6628 if (use_icount)
6629 gen_io_start();
6630 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6631 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6632 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6633 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6634 if (use_icount) {
6635 gen_io_end();
6636 gen_jmp(s, s->pc - s->cs_base);
6637 }
6638 break;
6639 case 0xec:
6640 case 0xed:
6641 if ((b & 1) == 0)
6642 ot = OT_BYTE;
6643 else
6644 ot = dflag ? OT_LONG : OT_WORD;
6645 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6646 gen_op_andl_T0_ffff();
6647 gen_check_io(s, ot, pc_start - s->cs_base,
6648 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6649 if (use_icount)
6650 gen_io_start();
6651 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6652 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6653 gen_op_mov_reg_T1(ot, R_EAX);
6654 if (use_icount) {
6655 gen_io_end();
6656 gen_jmp(s, s->pc - s->cs_base);
6657 }
6658 break;
6659 case 0xee:
6660 case 0xef:
6661 if ((b & 1) == 0)
6662 ot = OT_BYTE;
6663 else
6664 ot = dflag ? OT_LONG : OT_WORD;
6665 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6666 gen_op_andl_T0_ffff();
6667 gen_check_io(s, ot, pc_start - s->cs_base,
6668 svm_is_rep(prefixes));
6669 gen_op_mov_TN_reg(ot, 1, R_EAX);
6670
6671 if (use_icount)
6672 gen_io_start();
6673 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6674 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6675 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6676 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6677 if (use_icount) {
6678 gen_io_end();
6679 gen_jmp(s, s->pc - s->cs_base);
6680 }
6681 break;
6682
6683 /************************/
6684 /* control */
6685 case 0xc2: /* ret im */
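         /* pop the return address, then release it together with the imm16
            byte count; 2 << dflag is the size of the popped address
            (2/4/8 bytes for dflag 0/1/2) */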
6686 val = ldsw_code(s->pc);
6687 s->pc += 2;
6688 gen_pop_T0(s);
6689 if (CODE64(s) && s->dflag)
6690 s->dflag = 2;
6691 gen_stack_update(s, val + (2 << s->dflag));
6692 if (s->dflag == 0)
6693 gen_op_andl_T0_ffff();
6694 gen_op_jmp_T0();
6695 gen_eob(s);
6696 break;
6697 case 0xc3: /* ret */
6698 gen_pop_T0(s);
6699 gen_pop_update(s);
6700 if (s->dflag == 0)
6701 gen_op_andl_T0_ffff();
6702 gen_op_jmp_T0();
6703 gen_eob(s);
6704 break;
6705 case 0xca: /* lret im */
6706 val = ldsw_code(s->pc);
6707 s->pc += 2;
6708 do_lret:
6709 if (s->pe && !s->vm86) {
6710 if (s->cc_op != CC_OP_DYNAMIC)
6711 gen_op_set_cc_op(s->cc_op);
6712 gen_jmp_im(pc_start - s->cs_base);
6713 tcg_gen_helper_0_2(helper_lret_protected,
6714 tcg_const_i32(s->dflag),
6715 tcg_const_i32(val));
6716 } else {
6717 gen_stack_A0(s);
6718 /* pop offset */
6719 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6720 if (s->dflag == 0)
6721 gen_op_andl_T0_ffff();
6722 /* NOTE: keeping EIP updated is not a problem in case of
6723 exception */
6724 gen_op_jmp_T0();
6725 /* pop selector */
6726 gen_op_addl_A0_im(2 << s->dflag);
6727 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6728 gen_op_movl_seg_T0_vm(R_CS);
6729 /* add stack offset */
6730 gen_stack_update(s, val + (4 << s->dflag));
6731 }
6732 gen_eob(s);
6733 break;
6734 case 0xcb: /* lret */
6735 val = 0;
6736 goto do_lret;
6737 case 0xcf: /* iret */
6738 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6739 if (!s->pe) {
6740 /* real mode */
6741 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6742 s->cc_op = CC_OP_EFLAGS;
6743 } else if (s->vm86) {
6744#ifdef VBOX
6745 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6746#else
6747 if (s->iopl != 3) {
6748#endif
6749 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6750 } else {
6751 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6752 s->cc_op = CC_OP_EFLAGS;
6753 }
6754 } else {
6755 if (s->cc_op != CC_OP_DYNAMIC)
6756 gen_op_set_cc_op(s->cc_op);
6757 gen_jmp_im(pc_start - s->cs_base);
6758 tcg_gen_helper_0_2(helper_iret_protected,
6759 tcg_const_i32(s->dflag),
6760 tcg_const_i32(s->pc - s->cs_base));
6761 s->cc_op = CC_OP_EFLAGS;
6762 }
6763 gen_eob(s);
6764 break;
6765 case 0xe8: /* call im */
6766 {
6767 if (dflag)
6768 tval = (int32_t)insn_get(s, OT_LONG);
6769 else
6770 tval = (int16_t)insn_get(s, OT_WORD);
6771 next_eip = s->pc - s->cs_base;
6772 tval += next_eip;
6773 if (s->dflag == 0)
6774 tval &= 0xffff;
6775 gen_movtl_T0_im(next_eip);
6776 gen_push_T0(s);
6777 gen_jmp(s, tval);
6778 }
6779 break;
6780 case 0x9a: /* lcall im */
6781 {
6782 unsigned int selector, offset;
6783
6784 if (CODE64(s))
6785 goto illegal_op;
6786 ot = dflag ? OT_LONG : OT_WORD;
6787 offset = insn_get(s, ot);
6788 selector = insn_get(s, OT_WORD);
6789
6790 gen_op_movl_T0_im(selector);
6791 gen_op_movl_T1_imu(offset);
6792 }
6793 goto do_lcall;
6794 case 0xe9: /* jmp im */
6795 if (dflag)
6796 tval = (int32_t)insn_get(s, OT_LONG);
6797 else
6798 tval = (int16_t)insn_get(s, OT_WORD);
6799 tval += s->pc - s->cs_base;
6800 if (s->dflag == 0)
6801 tval &= 0xffff;
6802 gen_jmp(s, tval);
6803 break;
6804 case 0xea: /* ljmp im */
6805 {
6806 unsigned int selector, offset;
6807
6808 if (CODE64(s))
6809 goto illegal_op;
6810 ot = dflag ? OT_LONG : OT_WORD;
6811 offset = insn_get(s, ot);
6812 selector = insn_get(s, OT_WORD);
6813
6814 gen_op_movl_T0_im(selector);
6815 gen_op_movl_T1_imu(offset);
6816 }
6817 goto do_ljmp;
6818 case 0xeb: /* jmp Jb */
6819 tval = (int8_t)insn_get(s, OT_BYTE);
6820 tval += s->pc - s->cs_base;
6821 if (s->dflag == 0)
6822 tval &= 0xffff;
6823 gen_jmp(s, tval);
6824 break;
6825 case 0x70 ... 0x7f: /* jcc Jb */
6826 tval = (int8_t)insn_get(s, OT_BYTE);
6827 goto do_jcc;
6828 case 0x180 ... 0x18f: /* jcc Jv */
6829 if (dflag) {
6830 tval = (int32_t)insn_get(s, OT_LONG);
6831 } else {
6832 tval = (int16_t)insn_get(s, OT_WORD);
6833 }
6834 do_jcc:
6835 next_eip = s->pc - s->cs_base;
6836 tval += next_eip;
6837 if (s->dflag == 0)
6838 tval &= 0xffff;
6839 gen_jcc(s, b, tval, next_eip);
6840 break;
6841
6842 case 0x190 ... 0x19f: /* setcc Gv */
6843 modrm = ldub_code(s->pc++);
6844 gen_setcc(s, b);
6845 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6846 break;
6847 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6848 {
6849 int l1;
6850 TCGv t0;
6851
6852 ot = dflag + OT_WORD;
6853 modrm = ldub_code(s->pc++);
6854 reg = ((modrm >> 3) & 7) | rex_r;
6855 mod = (modrm >> 6) & 3;
6856 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6857 if (mod != 3) {
6858 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6859 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6860 } else {
6861 rm = (modrm & 7) | REX_B(s);
6862 gen_op_mov_v_reg(ot, t0, rm);
6863 }
6864#ifdef TARGET_X86_64
6865 if (ot == OT_LONG) {
6866                 /* XXX: specific Intel behaviour? */
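                 /* the low 32 bits are stored only when the condition holds,
                    but the high half of the destination is cleared
                    unconditionally: a 32-bit cmov zero-extends the register
                    in 64-bit mode even when no move takes place */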
6867 l1 = gen_new_label();
6868 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6869 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6870 gen_set_label(l1);
6871 tcg_gen_movi_tl(cpu_tmp0, 0);
6872 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6873 } else
6874#endif
6875 {
6876 l1 = gen_new_label();
6877 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6878 gen_op_mov_reg_v(ot, reg, t0);
6879 gen_set_label(l1);
6880 }
6881 tcg_temp_free(t0);
6882 }
6883 break;
6884
6885 /************************/
6886 /* flags */
6887 case 0x9c: /* pushf */
6888 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6889#ifdef VBOX
6890 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6891#else
6892 if (s->vm86 && s->iopl != 3) {
6893#endif
6894 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6895 } else {
6896 if (s->cc_op != CC_OP_DYNAMIC)
6897 gen_op_set_cc_op(s->cc_op);
6898#ifdef VBOX
6899 if (s->vm86 && s->vme && s->iopl != 3)
6900 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6901 else
6902#endif
6903 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6904 gen_push_T0(s);
6905 }
6906 break;
6907 case 0x9d: /* popf */
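         /* which eflags bits popf may change depends on privilege: CPL 0 may
            also modify IOPL and IF, CPL <= IOPL may modify IF but not IOPL,
            and otherwise only TF/AC/ID/NT are writable (plus the vm86/VME
            special case below) */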
6908 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6909#ifdef VBOX
6910 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6911#else
6912 if (s->vm86 && s->iopl != 3) {
6913#endif
6914 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6915 } else {
6916 gen_pop_T0(s);
6917 if (s->cpl == 0) {
6918 if (s->dflag) {
6919 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6920 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6921 } else {
6922 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6923 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6924 }
6925 } else {
6926 if (s->cpl <= s->iopl) {
6927 if (s->dflag) {
6928 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6929 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6930 } else {
6931 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6932 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6933 }
6934 } else {
6935 if (s->dflag) {
6936 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6937 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6938 } else {
6939#ifdef VBOX
6940 if (s->vm86 && s->vme)
6941 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
6942 else
6943#endif
6944 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6945 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6946 }
6947 }
6948 }
6949 gen_pop_update(s);
6950 s->cc_op = CC_OP_EFLAGS;
6951 /* abort translation because TF flag may change */
6952 gen_jmp_im(s->pc - s->cs_base);
6953 gen_eob(s);
6954 }
6955 break;
6956 case 0x9e: /* sahf */
6957 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6958 goto illegal_op;
6959 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6960 if (s->cc_op != CC_OP_DYNAMIC)
6961 gen_op_set_cc_op(s->cc_op);
6962 gen_compute_eflags(cpu_cc_src);
6963 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6964 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6965 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6966 s->cc_op = CC_OP_EFLAGS;
6967 break;
6968 case 0x9f: /* lahf */
6969 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6970 goto illegal_op;
6971 if (s->cc_op != CC_OP_DYNAMIC)
6972 gen_op_set_cc_op(s->cc_op);
6973 gen_compute_eflags(cpu_T[0]);
6974 /* Note: gen_compute_eflags() only gives the condition codes */
6975 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6976 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6977 break;
6978 case 0xf5: /* cmc */
6979 if (s->cc_op != CC_OP_DYNAMIC)
6980 gen_op_set_cc_op(s->cc_op);
6981 gen_compute_eflags(cpu_cc_src);
6982 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6983 s->cc_op = CC_OP_EFLAGS;
6984 break;
6985 case 0xf8: /* clc */
6986 if (s->cc_op != CC_OP_DYNAMIC)
6987 gen_op_set_cc_op(s->cc_op);
6988 gen_compute_eflags(cpu_cc_src);
6989 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6990 s->cc_op = CC_OP_EFLAGS;
6991 break;
6992 case 0xf9: /* stc */
6993 if (s->cc_op != CC_OP_DYNAMIC)
6994 gen_op_set_cc_op(s->cc_op);
6995 gen_compute_eflags(cpu_cc_src);
6996 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6997 s->cc_op = CC_OP_EFLAGS;
6998 break;
6999 case 0xfc: /* cld */
7000 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7001 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7002 break;
7003 case 0xfd: /* std */
7004 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7005 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7006 break;
7007
7008 /************************/
7009 /* bit operations */
7010 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7011 ot = dflag + OT_WORD;
7012 modrm = ldub_code(s->pc++);
7013 op = (modrm >> 3) & 7;
7014 mod = (modrm >> 6) & 3;
7015 rm = (modrm & 7) | REX_B(s);
7016 if (mod != 3) {
7017 s->rip_offset = 1;
7018 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7019 gen_op_ld_T0_A0(ot + s->mem_index);
7020 } else {
7021 gen_op_mov_TN_reg(ot, 0, rm);
7022 }
7023 /* load shift */
7024 val = ldub_code(s->pc++);
7025 gen_op_movl_T1_im(val);
7026 if (op < 4)
7027 goto illegal_op;
7028 op -= 4;
7029 goto bt_op;
7030 case 0x1a3: /* bt Gv, Ev */
7031 op = 0;
7032 goto do_btx;
7033 case 0x1ab: /* bts */
7034 op = 1;
7035 goto do_btx;
7036 case 0x1b3: /* btr */
7037 op = 2;
7038 goto do_btx;
7039 case 0x1bb: /* btc */
7040 op = 3;
7041 do_btx:
7042 ot = dflag + OT_WORD;
7043 modrm = ldub_code(s->pc++);
7044 reg = ((modrm >> 3) & 7) | rex_r;
7045 mod = (modrm >> 6) & 3;
7046 rm = (modrm & 7) | REX_B(s);
7047 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7048 if (mod != 3) {
7049 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7050                 /* specific case: the bit offset may exceed the operand size, so add the corresponding word displacement to the address */
7051 gen_exts(ot, cpu_T[1]);
7052 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7053 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7054 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7055 gen_op_ld_T0_A0(ot + s->mem_index);
7056 } else {
7057 gen_op_mov_TN_reg(ot, 0, rm);
7058 }
7059 bt_op:
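             /* the bit offset is reduced modulo the operand width; the
                selected bit lands in bit 0 of cc_src so CC_OP_SARB + ot
                recovers CF from it, while bts/btr/btc build a one-bit mask
                and or/and-not/xor it into the operand */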
7060 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7061 switch(op) {
7062 case 0:
7063 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7064 tcg_gen_movi_tl(cpu_cc_dst, 0);
7065 break;
7066 case 1:
7067 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7068 tcg_gen_movi_tl(cpu_tmp0, 1);
7069 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7070 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7071 break;
7072 case 2:
7073 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7074 tcg_gen_movi_tl(cpu_tmp0, 1);
7075 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7076 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7077 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7078 break;
7079 default:
7080 case 3:
7081 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7082 tcg_gen_movi_tl(cpu_tmp0, 1);
7083 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7084 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7085 break;
7086 }
7087 s->cc_op = CC_OP_SARB + ot;
7088 if (op != 0) {
7089 if (mod != 3)
7090 gen_op_st_T0_A0(ot + s->mem_index);
7091 else
7092 gen_op_mov_reg_T0(ot, rm);
7093 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7094 tcg_gen_movi_tl(cpu_cc_dst, 0);
7095 }
7096 break;
7097 case 0x1bc: /* bsf */
7098 case 0x1bd: /* bsr */
7099 {
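                 /* if the source is zero the helper is skipped: cc_dst stays 0,
                    ZF is set and the destination keeps its old value; otherwise
                    the helper returns the bit index and cc_dst = 1 clears ZF */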
7100 int label1;
7101 TCGv t0;
7102
7103 ot = dflag + OT_WORD;
7104 modrm = ldub_code(s->pc++);
7105 reg = ((modrm >> 3) & 7) | rex_r;
7106 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7107 gen_extu(ot, cpu_T[0]);
7108 label1 = gen_new_label();
7109 tcg_gen_movi_tl(cpu_cc_dst, 0);
7110 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7111 tcg_gen_mov_tl(t0, cpu_T[0]);
7112 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7113 if (b & 1) {
7114 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7115 } else {
7116 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7117 }
7118 gen_op_mov_reg_T0(ot, reg);
7119 tcg_gen_movi_tl(cpu_cc_dst, 1);
7120 gen_set_label(label1);
7121 tcg_gen_discard_tl(cpu_cc_src);
7122 s->cc_op = CC_OP_LOGICB + ot;
7123 tcg_temp_free(t0);
7124 }
7125 break;
7126 /************************/
7127 /* bcd */
7128 case 0x27: /* daa */
7129 if (CODE64(s))
7130 goto illegal_op;
7131 if (s->cc_op != CC_OP_DYNAMIC)
7132 gen_op_set_cc_op(s->cc_op);
7133 tcg_gen_helper_0_0(helper_daa);
7134 s->cc_op = CC_OP_EFLAGS;
7135 break;
7136 case 0x2f: /* das */
7137 if (CODE64(s))
7138 goto illegal_op;
7139 if (s->cc_op != CC_OP_DYNAMIC)
7140 gen_op_set_cc_op(s->cc_op);
7141 tcg_gen_helper_0_0(helper_das);
7142 s->cc_op = CC_OP_EFLAGS;
7143 break;
7144 case 0x37: /* aaa */
7145 if (CODE64(s))
7146 goto illegal_op;
7147 if (s->cc_op != CC_OP_DYNAMIC)
7148 gen_op_set_cc_op(s->cc_op);
7149 tcg_gen_helper_0_0(helper_aaa);
7150 s->cc_op = CC_OP_EFLAGS;
7151 break;
7152 case 0x3f: /* aas */
7153 if (CODE64(s))
7154 goto illegal_op;
7155 if (s->cc_op != CC_OP_DYNAMIC)
7156 gen_op_set_cc_op(s->cc_op);
7157 tcg_gen_helper_0_0(helper_aas);
7158 s->cc_op = CC_OP_EFLAGS;
7159 break;
7160 case 0xd4: /* aam */
7161 if (CODE64(s))
7162 goto illegal_op;
7163 val = ldub_code(s->pc++);
7164 if (val == 0) {
7165 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7166 } else {
7167 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7168 s->cc_op = CC_OP_LOGICB;
7169 }
7170 break;
7171 case 0xd5: /* aad */
7172 if (CODE64(s))
7173 goto illegal_op;
7174 val = ldub_code(s->pc++);
7175 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7176 s->cc_op = CC_OP_LOGICB;
7177 break;
7178 /************************/
7179 /* misc */
7180 case 0x90: /* nop */
7181 /* XXX: xchg + rex handling */
7182 /* XXX: correct lock test for all insn */
7183 if (prefixes & PREFIX_LOCK)
7184 goto illegal_op;
7185 if (prefixes & PREFIX_REPZ) {
7186 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7187 }
7188 break;
7189 case 0x9b: /* fwait */
7190 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7191 (HF_MP_MASK | HF_TS_MASK)) {
7192 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7193 } else {
7194 if (s->cc_op != CC_OP_DYNAMIC)
7195 gen_op_set_cc_op(s->cc_op);
7196 gen_jmp_im(pc_start - s->cs_base);
7197 tcg_gen_helper_0_0(helper_fwait);
7198 }
7199 break;
7200 case 0xcc: /* int3 */
7201#ifdef VBOX
7202 if (s->vm86 && s->iopl != 3 && !s->vme) {
7203 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7204 } else
7205#endif
7206 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7207 break;
7208 case 0xcd: /* int N */
7209 val = ldub_code(s->pc++);
7210#ifdef VBOX
7211 if (s->vm86 && s->iopl != 3 && !s->vme) {
7212#else
7213 if (s->vm86 && s->iopl != 3) {
7214#endif
7215 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7216 } else {
7217 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7218 }
7219 break;
7220 case 0xce: /* into */
7221 if (CODE64(s))
7222 goto illegal_op;
7223 if (s->cc_op != CC_OP_DYNAMIC)
7224 gen_op_set_cc_op(s->cc_op);
7225 gen_jmp_im(pc_start - s->cs_base);
7226 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7227 break;
7228 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7229 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7230#if 1
7231 gen_debug(s, pc_start - s->cs_base);
7232#else
7233 /* start debug */
7234 tb_flush(cpu_single_env);
7235 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7236#endif
7237 break;
7238 case 0xfa: /* cli */
7239 if (!s->vm86) {
7240 if (s->cpl <= s->iopl) {
7241 tcg_gen_helper_0_0(helper_cli);
7242 } else {
7243 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7244 }
7245 } else {
7246 if (s->iopl == 3) {
7247 tcg_gen_helper_0_0(helper_cli);
7248#ifdef VBOX
7249 } else if (s->iopl != 3 && s->vme) {
7250 tcg_gen_helper_0_0(helper_cli_vme);
7251#endif
7252 } else {
7253 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7254 }
7255 }
7256 break;
7257 case 0xfb: /* sti */
7258 if (!s->vm86) {
7259 if (s->cpl <= s->iopl) {
7260 gen_sti:
7261 tcg_gen_helper_0_0(helper_sti);
7262                 /* interrupts are recognized only after the first insn following sti */
7263                 /* if several consecutive insns inhibit interrupts, only the
7264                    _first_ one sets the inhibit flag */
7265 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7266 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7267 /* give a chance to handle pending irqs */
7268 gen_jmp_im(s->pc - s->cs_base);
7269 gen_eob(s);
7270 } else {
7271 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7272 }
7273 } else {
7274 if (s->iopl == 3) {
7275 goto gen_sti;
7276#ifdef VBOX
7277 } else if (s->iopl != 3 && s->vme) {
7278 tcg_gen_helper_0_0(helper_sti_vme);
7279 /* give a chance to handle pending irqs */
7280 gen_jmp_im(s->pc - s->cs_base);
7281 gen_eob(s);
7282#endif
7283 } else {
7284 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7285 }
7286 }
7287 break;
7288 case 0x62: /* bound */
7289 if (CODE64(s))
7290 goto illegal_op;
7291 ot = dflag ? OT_LONG : OT_WORD;
7292 modrm = ldub_code(s->pc++);
7293 reg = (modrm >> 3) & 7;
7294 mod = (modrm >> 6) & 3;
7295 if (mod == 3)
7296 goto illegal_op;
7297 gen_op_mov_TN_reg(ot, 0, reg);
7298 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7299 gen_jmp_im(pc_start - s->cs_base);
7300 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7301 if (ot == OT_WORD)
7302 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7303 else
7304 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7305 break;
7306 case 0x1c8 ... 0x1cf: /* bswap reg */
7307 reg = (b & 7) | REX_B(s);
7308#ifdef TARGET_X86_64
7309 if (dflag == 2) {
7310 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7311 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7312 gen_op_mov_reg_T0(OT_QUAD, reg);
7313 } else
7314 {
7315 TCGv tmp0;
7316 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7317
7318 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7319 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7320 tcg_gen_bswap_i32(tmp0, tmp0);
7321 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7322 gen_op_mov_reg_T0(OT_LONG, reg);
7323 }
7324#else
7325 {
7326 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7327 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7328 gen_op_mov_reg_T0(OT_LONG, reg);
7329 }
7330#endif
7331 break;
7332 case 0xd6: /* salc */
7333 if (CODE64(s))
7334 goto illegal_op;
7335 if (s->cc_op != CC_OP_DYNAMIC)
7336 gen_op_set_cc_op(s->cc_op);
7337 gen_compute_eflags_c(cpu_T[0]);
7338 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7339 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7340 break;
7341 case 0xe0: /* loopnz */
7342 case 0xe1: /* loopz */
7343 case 0xe2: /* loop */
7344 case 0xe3: /* jecxz */
7345 {
7346 int l1, l2, l3;
7347
7348 tval = (int8_t)insn_get(s, OT_BYTE);
7349 next_eip = s->pc - s->cs_base;
7350 tval += next_eip;
7351 if (s->dflag == 0)
7352 tval &= 0xffff;
7353
7354 l1 = gen_new_label();
7355 l2 = gen_new_label();
7356 l3 = gen_new_label();
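                 /* l1 is the branch-taken target, l3 the fall-through path used
                    by loopz/loopnz when ECX reaches zero, and l2 the common
                    exit once EIP has been set */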
7357 b &= 3;
7358 switch(b) {
7359 case 0: /* loopnz */
7360 case 1: /* loopz */
7361 if (s->cc_op != CC_OP_DYNAMIC)
7362 gen_op_set_cc_op(s->cc_op);
7363 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7364 gen_op_jz_ecx(s->aflag, l3);
7365 gen_compute_eflags(cpu_tmp0);
7366 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7367 if (b == 0) {
7368 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7369 } else {
7370 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7371 }
7372 break;
7373 case 2: /* loop */
7374 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7375 gen_op_jnz_ecx(s->aflag, l1);
7376 break;
7377 default:
7378 case 3: /* jcxz */
7379 gen_op_jz_ecx(s->aflag, l1);
7380 break;
7381 }
7382
7383 gen_set_label(l3);
7384 gen_jmp_im(next_eip);
7385 tcg_gen_br(l2);
7386
7387 gen_set_label(l1);
7388 gen_jmp_im(tval);
7389 gen_set_label(l2);
7390 gen_eob(s);
7391 }
7392 break;
7393 case 0x130: /* wrmsr */
7394 case 0x132: /* rdmsr */
7395 if (s->cpl != 0) {
7396 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7397 } else {
7398 if (s->cc_op != CC_OP_DYNAMIC)
7399 gen_op_set_cc_op(s->cc_op);
7400 gen_jmp_im(pc_start - s->cs_base);
7401 if (b & 2) {
7402 tcg_gen_helper_0_0(helper_rdmsr);
7403 } else {
7404 tcg_gen_helper_0_0(helper_wrmsr);
7405 }
7406 }
7407 break;
7408 case 0x131: /* rdtsc */
7409 if (s->cc_op != CC_OP_DYNAMIC)
7410 gen_op_set_cc_op(s->cc_op);
7411 gen_jmp_im(pc_start - s->cs_base);
7412 if (use_icount)
7413 gen_io_start();
7414 tcg_gen_helper_0_0(helper_rdtsc);
7415 if (use_icount) {
7416 gen_io_end();
7417 gen_jmp(s, s->pc - s->cs_base);
7418 }
7419 break;
7420 case 0x133: /* rdpmc */
7421 if (s->cc_op != CC_OP_DYNAMIC)
7422 gen_op_set_cc_op(s->cc_op);
7423 gen_jmp_im(pc_start - s->cs_base);
7424 tcg_gen_helper_0_0(helper_rdpmc);
7425 break;
7426 case 0x134: /* sysenter */
7427#ifndef VBOX
7428         /* On Intel CPUs, SYSENTER is valid in 64-bit mode */
7429 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7430#else
7431         /** @todo apply the same Intel vendor check here instead of rejecting SYSENTER outright in 64-bit mode */
7432 if (CODE64(s))
7433#endif
7434 goto illegal_op;
7435 if (!s->pe) {
7436 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7437 } else {
7438 if (s->cc_op != CC_OP_DYNAMIC) {
7439 gen_op_set_cc_op(s->cc_op);
7440 s->cc_op = CC_OP_DYNAMIC;
7441 }
7442 gen_jmp_im(pc_start - s->cs_base);
7443 tcg_gen_helper_0_0(helper_sysenter);
7444 gen_eob(s);
7445 }
7446 break;
7447 case 0x135: /* sysexit */
7448#ifndef VBOX
7449         /* On Intel CPUs, SYSEXIT is valid in 64-bit mode */
7450 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7451#else
7452         /** @todo apply the same Intel vendor check here instead of rejecting SYSEXIT outright in 64-bit mode */
7453 if (CODE64(s))
7454#endif
7455 goto illegal_op;
7456 if (!s->pe) {
7457 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7458 } else {
7459 if (s->cc_op != CC_OP_DYNAMIC) {
7460 gen_op_set_cc_op(s->cc_op);
7461 s->cc_op = CC_OP_DYNAMIC;
7462 }
7463 gen_jmp_im(pc_start - s->cs_base);
7464 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7465 gen_eob(s);
7466 }
7467 break;
7468#ifdef TARGET_X86_64
7469 case 0x105: /* syscall */
7470         /* XXX: is it usable in real mode? */
7471 if (s->cc_op != CC_OP_DYNAMIC) {
7472 gen_op_set_cc_op(s->cc_op);
7473 s->cc_op = CC_OP_DYNAMIC;
7474 }
7475 gen_jmp_im(pc_start - s->cs_base);
7476 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7477 gen_eob(s);
7478 break;
7479 case 0x107: /* sysret */
7480 if (!s->pe) {
7481 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7482 } else {
7483 if (s->cc_op != CC_OP_DYNAMIC) {
7484 gen_op_set_cc_op(s->cc_op);
7485 s->cc_op = CC_OP_DYNAMIC;
7486 }
7487 gen_jmp_im(pc_start - s->cs_base);
7488 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7489 /* condition codes are modified only in long mode */
7490 if (s->lma)
7491 s->cc_op = CC_OP_EFLAGS;
7492 gen_eob(s);
7493 }
7494 break;
7495#endif
7496 case 0x1a2: /* cpuid */
7497 if (s->cc_op != CC_OP_DYNAMIC)
7498 gen_op_set_cc_op(s->cc_op);
7499 gen_jmp_im(pc_start - s->cs_base);
7500 tcg_gen_helper_0_0(helper_cpuid);
7501 break;
7502 case 0xf4: /* hlt */
7503 if (s->cpl != 0) {
7504 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7505 } else {
7506 if (s->cc_op != CC_OP_DYNAMIC)
7507 gen_op_set_cc_op(s->cc_op);
7508 gen_jmp_im(pc_start - s->cs_base);
7509 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7510 s->is_jmp = 3;
7511 }
7512 break;
7513 case 0x100:
7514 modrm = ldub_code(s->pc++);
7515 mod = (modrm >> 6) & 3;
7516 op = (modrm >> 3) & 7;
7517 switch(op) {
7518 case 0: /* sldt */
7519 if (!s->pe || s->vm86)
7520 goto illegal_op;
7521 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7522 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7523 ot = OT_WORD;
7524 if (mod == 3)
7525 ot += s->dflag;
7526 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7527 break;
7528 case 2: /* lldt */
7529 if (!s->pe || s->vm86)
7530 goto illegal_op;
7531 if (s->cpl != 0) {
7532 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7533 } else {
7534 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7535 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7536 gen_jmp_im(pc_start - s->cs_base);
7537 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7538 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7539 }
7540 break;
7541 case 1: /* str */
7542 if (!s->pe || s->vm86)
7543 goto illegal_op;
7544 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7545 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7546 ot = OT_WORD;
7547 if (mod == 3)
7548 ot += s->dflag;
7549 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7550 break;
7551 case 3: /* ltr */
7552 if (!s->pe || s->vm86)
7553 goto illegal_op;
7554 if (s->cpl != 0) {
7555 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7556 } else {
7557 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7558 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7559 gen_jmp_im(pc_start - s->cs_base);
7560 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7561 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7562 }
7563 break;
7564 case 4: /* verr */
7565 case 5: /* verw */
7566 if (!s->pe || s->vm86)
7567 goto illegal_op;
7568 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7569 if (s->cc_op != CC_OP_DYNAMIC)
7570 gen_op_set_cc_op(s->cc_op);
7571 if (op == 4)
7572 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7573 else
7574 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7575 s->cc_op = CC_OP_EFLAGS;
7576 break;
7577 default:
7578 goto illegal_op;
7579 }
7580 break;
7581 case 0x101:
7582 modrm = ldub_code(s->pc++);
7583 mod = (modrm >> 6) & 3;
7584 op = (modrm >> 3) & 7;
7585 rm = modrm & 7;
7586 switch(op) {
7587 case 0: /* sgdt */
7588 if (mod == 3)
7589 goto illegal_op;
7590 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7591 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7592 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7593 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7594 gen_add_A0_im(s, 2);
7595 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7596 if (!s->dflag)
7597 gen_op_andl_T0_im(0xffffff);
7598 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7599 break;
7600 case 1:
7601 if (mod == 3) {
7602 switch (rm) {
7603 case 0: /* monitor */
7604 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7605 s->cpl != 0)
7606 goto illegal_op;
7607 if (s->cc_op != CC_OP_DYNAMIC)
7608 gen_op_set_cc_op(s->cc_op);
7609 gen_jmp_im(pc_start - s->cs_base);
7610#ifdef TARGET_X86_64
7611 if (s->aflag == 2) {
7612 gen_op_movq_A0_reg(R_EAX);
7613 } else
7614#endif
7615 {
7616 gen_op_movl_A0_reg(R_EAX);
7617 if (s->aflag == 0)
7618 gen_op_andl_A0_ffff();
7619 }
7620 gen_add_A0_ds_seg(s);
7621 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7622 break;
7623 case 1: /* mwait */
7624 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7625 s->cpl != 0)
7626 goto illegal_op;
7627 if (s->cc_op != CC_OP_DYNAMIC) {
7628 gen_op_set_cc_op(s->cc_op);
7629 s->cc_op = CC_OP_DYNAMIC;
7630 }
7631 gen_jmp_im(pc_start - s->cs_base);
7632 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7633 gen_eob(s);
7634 break;
7635 default:
7636 goto illegal_op;
7637 }
7638 } else { /* sidt */
7639 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7640 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7641 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7642 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7643 gen_add_A0_im(s, 2);
7644 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7645 if (!s->dflag)
7646 gen_op_andl_T0_im(0xffffff);
7647 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7648 }
7649 break;
7650 case 2: /* lgdt */
7651 case 3: /* lidt */
7652 if (mod == 3) {
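                     /* the register (mod == 3) forms of the lgdt/lidt
                        encodings are the SVM instruction group; rm selects
                        VMRUN ... INVLPGA */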
7653 if (s->cc_op != CC_OP_DYNAMIC)
7654 gen_op_set_cc_op(s->cc_op);
7655 gen_jmp_im(pc_start - s->cs_base);
7656 switch(rm) {
7657 case 0: /* VMRUN */
7658 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7659 goto illegal_op;
7660 if (s->cpl != 0) {
7661 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7662 break;
7663 } else {
7664 tcg_gen_helper_0_2(helper_vmrun,
7665 tcg_const_i32(s->aflag),
7666 tcg_const_i32(s->pc - pc_start));
7667 tcg_gen_exit_tb(0);
7668 s->is_jmp = 3;
7669 }
7670 break;
7671 case 1: /* VMMCALL */
7672 if (!(s->flags & HF_SVME_MASK))
7673 goto illegal_op;
7674 tcg_gen_helper_0_0(helper_vmmcall);
7675 break;
7676 case 2: /* VMLOAD */
7677 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7678 goto illegal_op;
7679 if (s->cpl != 0) {
7680 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7681 break;
7682 } else {
7683 tcg_gen_helper_0_1(helper_vmload,
7684 tcg_const_i32(s->aflag));
7685 }
7686 break;
7687 case 3: /* VMSAVE */
7688 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7689 goto illegal_op;
7690 if (s->cpl != 0) {
7691 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7692 break;
7693 } else {
7694 tcg_gen_helper_0_1(helper_vmsave,
7695 tcg_const_i32(s->aflag));
7696 }
7697 break;
7698 case 4: /* STGI */
7699 if ((!(s->flags & HF_SVME_MASK) &&
7700 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7701 !s->pe)
7702 goto illegal_op;
7703 if (s->cpl != 0) {
7704 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7705 break;
7706 } else {
7707 tcg_gen_helper_0_0(helper_stgi);
7708 }
7709 break;
7710 case 5: /* CLGI */
7711 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7712 goto illegal_op;
7713 if (s->cpl != 0) {
7714 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7715 break;
7716 } else {
7717 tcg_gen_helper_0_0(helper_clgi);
7718 }
7719 break;
7720 case 6: /* SKINIT */
7721 if ((!(s->flags & HF_SVME_MASK) &&
7722 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7723 !s->pe)
7724 goto illegal_op;
7725 tcg_gen_helper_0_0(helper_skinit);
7726 break;
7727 case 7: /* INVLPGA */
7728 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7729 goto illegal_op;
7730 if (s->cpl != 0) {
7731 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7732 break;
7733 } else {
7734 tcg_gen_helper_0_1(helper_invlpga,
7735 tcg_const_i32(s->aflag));
7736 }
7737 break;
7738 default:
7739 goto illegal_op;
7740 }
7741 } else if (s->cpl != 0) {
7742 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7743 } else {
7744 gen_svm_check_intercept(s, pc_start,
7745 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7746 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7747 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7748 gen_add_A0_im(s, 2);
7749 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7750 if (!s->dflag)
7751 gen_op_andl_T0_im(0xffffff);
7752 if (op == 2) {
7753 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7754 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7755 } else {
7756 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7757 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7758 }
7759 }
7760 break;
7761 case 4: /* smsw */
7762 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7763 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7764 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7765 break;
7766 case 6: /* lmsw */
7767 if (s->cpl != 0) {
7768 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7769 } else {
7770 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7771 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7772 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7773 gen_jmp_im(s->pc - s->cs_base);
7774 gen_eob(s);
7775 }
7776 break;
7777 case 7: /* invlpg */
7778 if (s->cpl != 0) {
7779 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7780 } else {
7781 if (mod == 3) {
7782#ifdef TARGET_X86_64
7783 if (CODE64(s) && rm == 0) {
7784 /* swapgs */
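                         /* exchange the current GS base with the saved value
                            of the KERNEL_GS_BASE MSR (kernelgsbase) */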
7785 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7786 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7787 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7788 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7789 } else
7790#endif
7791 {
7792 goto illegal_op;
7793 }
7794 } else {
7795 if (s->cc_op != CC_OP_DYNAMIC)
7796 gen_op_set_cc_op(s->cc_op);
7797 gen_jmp_im(pc_start - s->cs_base);
7798 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7799 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7800 gen_jmp_im(s->pc - s->cs_base);
7801 gen_eob(s);
7802 }
7803 }
7804 break;
7805 default:
7806 goto illegal_op;
7807 }
7808 break;
7809 case 0x108: /* invd */
7810 case 0x109: /* wbinvd */
7811 if (s->cpl != 0) {
7812 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7813 } else {
7814 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7815 /* nothing to do */
7816 }
7817 break;
7818 case 0x63: /* arpl or movslS (x86_64) */
7819#ifdef TARGET_X86_64
7820 if (CODE64(s)) {
7821 int d_ot;
7822 /* d_ot is the size of destination */
7823 d_ot = dflag + OT_WORD;
7824
7825 modrm = ldub_code(s->pc++);
7826 reg = ((modrm >> 3) & 7) | rex_r;
7827 mod = (modrm >> 6) & 3;
7828 rm = (modrm & 7) | REX_B(s);
7829
7830 if (mod == 3) {
7831 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7832 /* sign extend */
7833 if (d_ot == OT_QUAD)
7834 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7835 gen_op_mov_reg_T0(d_ot, reg);
7836 } else {
7837 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7838 if (d_ot == OT_QUAD) {
7839 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7840 } else {
7841 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7842 }
7843 gen_op_mov_reg_T0(d_ot, reg);
7844 }
7845 } else
7846#endif
7847 {
7848 int label1;
7849 TCGv t0, t1, t2;
7850
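                 /* arpl: if the destination selector's RPL (low 2 bits) is
                    below the source's, raise it to the source RPL and set ZF;
                    t2 carries the CC_Z bit merged into eflags afterwards */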
7851 if (!s->pe || s->vm86)
7852 goto illegal_op;
7853 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7854 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7855 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7856 ot = OT_WORD;
7857 modrm = ldub_code(s->pc++);
7858 reg = (modrm >> 3) & 7;
7859 mod = (modrm >> 6) & 3;
7860 rm = modrm & 7;
7861 if (mod != 3) {
7862 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7863 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7864 } else {
7865 gen_op_mov_v_reg(ot, t0, rm);
7866 }
7867 gen_op_mov_v_reg(ot, t1, reg);
7868 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7869 tcg_gen_andi_tl(t1, t1, 3);
7870 tcg_gen_movi_tl(t2, 0);
7871 label1 = gen_new_label();
7872 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7873 tcg_gen_andi_tl(t0, t0, ~3);
7874 tcg_gen_or_tl(t0, t0, t1);
7875 tcg_gen_movi_tl(t2, CC_Z);
7876 gen_set_label(label1);
7877 if (mod != 3) {
7878 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7879 } else {
7880 gen_op_mov_reg_v(ot, rm, t0);
7881 }
7882 if (s->cc_op != CC_OP_DYNAMIC)
7883 gen_op_set_cc_op(s->cc_op);
7884 gen_compute_eflags(cpu_cc_src);
7885 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7886 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7887 s->cc_op = CC_OP_EFLAGS;
7888 tcg_temp_free(t0);
7889 tcg_temp_free(t1);
7890 tcg_temp_free(t2);
7891 }
7892 break;
7893 case 0x102: /* lar */
7894 case 0x103: /* lsl */
7895 {
7896 int label1;
7897 TCGv t0;
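                 /* helper_lar/helper_lsl set CC_Z in cc_src on success; the
                    destination register is written only when ZF ended up set,
                    otherwise it is left unchanged */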
7898 if (!s->pe || s->vm86)
7899 goto illegal_op;
7900 ot = dflag ? OT_LONG : OT_WORD;
7901 modrm = ldub_code(s->pc++);
7902 reg = ((modrm >> 3) & 7) | rex_r;
7903 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7904 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7905 if (s->cc_op != CC_OP_DYNAMIC)
7906 gen_op_set_cc_op(s->cc_op);
7907 if (b == 0x102)
7908 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
7909 else
7910 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
7911 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7912 label1 = gen_new_label();
7913 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7914 gen_op_mov_reg_v(ot, reg, t0);
7915 gen_set_label(label1);
7916 s->cc_op = CC_OP_EFLAGS;
7917 tcg_temp_free(t0);
7918 }
7919 break;
7920 case 0x118:
7921 modrm = ldub_code(s->pc++);
7922 mod = (modrm >> 6) & 3;
7923 op = (modrm >> 3) & 7;
7924 switch(op) {
7925             case 0: /* prefetchnta */
7926             case 1: /* prefetcht0 */
7927             case 2: /* prefetcht1 */
7928             case 3: /* prefetcht2 */
7929 if (mod == 3)
7930 goto illegal_op;
7931 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7932 /* nothing more to do */
7933 break;
7934 default: /* nop (multi byte) */
7935 gen_nop_modrm(s, modrm);
7936 break;
7937 }
7938 break;
7939 case 0x119 ... 0x11f: /* nop (multi byte) */
7940 modrm = ldub_code(s->pc++);
7941 gen_nop_modrm(s, modrm);
7942 break;
7943 case 0x120: /* mov reg, crN */
7944 case 0x122: /* mov crN, reg */
7945 if (s->cpl != 0) {
7946 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7947 } else {
7948 modrm = ldub_code(s->pc++);
7949 if ((modrm & 0xc0) != 0xc0)
7950 goto illegal_op;
7951 rm = (modrm & 7) | REX_B(s);
7952 reg = ((modrm >> 3) & 7) | rex_r;
7953 if (CODE64(s))
7954 ot = OT_QUAD;
7955 else
7956 ot = OT_LONG;
7957 switch(reg) {
7958 case 0:
7959 case 2:
7960 case 3:
7961 case 4:
7962 case 8:
7963 if (s->cc_op != CC_OP_DYNAMIC)
7964 gen_op_set_cc_op(s->cc_op);
7965 gen_jmp_im(pc_start - s->cs_base);
7966 if (b & 2) {
7967 gen_op_mov_TN_reg(ot, 0, rm);
7968 tcg_gen_helper_0_2(helper_write_crN,
7969 tcg_const_i32(reg), cpu_T[0]);
7970 gen_jmp_im(s->pc - s->cs_base);
7971 gen_eob(s);
7972 } else {
7973 tcg_gen_helper_1_1(helper_read_crN,
7974 cpu_T[0], tcg_const_i32(reg));
7975 gen_op_mov_reg_T0(ot, rm);
7976 }
7977 break;
7978 default:
7979 goto illegal_op;
7980 }
7981 }
7982 break;
7983 case 0x121: /* mov reg, drN */
7984 case 0x123: /* mov drN, reg */
7985 if (s->cpl != 0) {
7986 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7987 } else {
7988 modrm = ldub_code(s->pc++);
7989 if ((modrm & 0xc0) != 0xc0)
7990 goto illegal_op;
7991 rm = (modrm & 7) | REX_B(s);
7992 reg = ((modrm >> 3) & 7) | rex_r;
7993 if (CODE64(s))
7994 ot = OT_QUAD;
7995 else
7996 ot = OT_LONG;
7997 /* XXX: do it dynamically with CR4.DE bit */
7998 if (reg == 4 || reg == 5 || reg >= 8)
7999 goto illegal_op;
8000 if (b & 2) {
8001 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8002 gen_op_mov_TN_reg(ot, 0, rm);
8003 tcg_gen_helper_0_2(helper_movl_drN_T0,
8004 tcg_const_i32(reg), cpu_T[0]);
8005 gen_jmp_im(s->pc - s->cs_base);
8006 gen_eob(s);
8007 } else {
8008 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8009 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8010 gen_op_mov_reg_T0(ot, rm);
8011 }
8012 }
8013 break;
8014 case 0x106: /* clts */
8015 if (s->cpl != 0) {
8016 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8017 } else {
8018 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8019 tcg_gen_helper_0_0(helper_clts);
8020 /* abort block because static cpu state changed */
8021 gen_jmp_im(s->pc - s->cs_base);
8022 gen_eob(s);
8023 }
8024 break;
8025 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8026 case 0x1c3: /* MOVNTI reg, mem */
8027 if (!(s->cpuid_features & CPUID_SSE2))
8028 goto illegal_op;
8029 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8030 modrm = ldub_code(s->pc++);
8031 mod = (modrm >> 6) & 3;
8032 if (mod == 3)
8033 goto illegal_op;
8034 reg = ((modrm >> 3) & 7) | rex_r;
8035 /* generate a generic store */
8036 gen_ldst_modrm(s, modrm, ot, reg, 1);
8037 break;
8038 case 0x1ae:
8039 modrm = ldub_code(s->pc++);
8040 mod = (modrm >> 6) & 3;
8041 op = (modrm >> 3) & 7;
8042 switch(op) {
8043 case 0: /* fxsave */
8044 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8045 (s->flags & HF_EM_MASK))
8046 goto illegal_op;
8047 if (s->flags & HF_TS_MASK) {
8048 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8049 break;
8050 }
8051 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8052 if (s->cc_op != CC_OP_DYNAMIC)
8053 gen_op_set_cc_op(s->cc_op);
8054 gen_jmp_im(pc_start - s->cs_base);
8055 tcg_gen_helper_0_2(helper_fxsave,
8056 cpu_A0, tcg_const_i32((s->dflag == 2)));
8057 break;
8058 case 1: /* fxrstor */
8059 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8060 (s->flags & HF_EM_MASK))
8061 goto illegal_op;
8062 if (s->flags & HF_TS_MASK) {
8063 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8064 break;
8065 }
8066 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8067 if (s->cc_op != CC_OP_DYNAMIC)
8068 gen_op_set_cc_op(s->cc_op);
8069 gen_jmp_im(pc_start - s->cs_base);
8070 tcg_gen_helper_0_2(helper_fxrstor,
8071 cpu_A0, tcg_const_i32((s->dflag == 2)));
8072 break;
8073 case 2: /* ldmxcsr */
8074 case 3: /* stmxcsr */
8075 if (s->flags & HF_TS_MASK) {
8076 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8077 break;
8078 }
8079 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8080 mod == 3)
8081 goto illegal_op;
8082 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8083 if (op == 2) {
8084 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8085 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8086 } else {
8087 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8088 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8089 }
8090 break;
8091 case 5: /* lfence */
8092 case 6: /* mfence */
8093 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8094 goto illegal_op;
8095 break;
8096 case 7: /* sfence / clflush */
8097 if ((modrm & 0xc7) == 0xc0) {
8098 /* sfence */
8099 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8100 if (!(s->cpuid_features & CPUID_SSE))
8101 goto illegal_op;
8102 } else {
8103 /* clflush */
8104 if (!(s->cpuid_features & CPUID_CLFLUSH))
8105 goto illegal_op;
8106 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8107 }
8108 break;
8109 default:
8110 goto illegal_op;
8111 }
8112 break;
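    /* In the 0F AE group the ModRM reg field selects the operation:
       /0 fxsave, /1 fxrstor, /2 ldmxcsr, /3 stmxcsr, /5 lfence,
       /6 mfence, /7 sfence (register form) or clflush (memory form).
       The fences need no generated code in this single-threaded
       recompiler beyond the CPUID and encoding checks above. */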
8113 case 0x10d: /* 3DNow! prefetch(w) */
8114 modrm = ldub_code(s->pc++);
8115 mod = (modrm >> 6) & 3;
8116 if (mod == 3)
8117 goto illegal_op;
8118 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8119 /* ignore for now */
8120 break;
8121 case 0x1aa: /* rsm */
8122 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8123 if (!(s->flags & HF_SMM_MASK))
8124 goto illegal_op;
8125 if (s->cc_op != CC_OP_DYNAMIC) {
8126 gen_op_set_cc_op(s->cc_op);
8127 s->cc_op = CC_OP_DYNAMIC;
8128 }
8129 gen_jmp_im(s->pc - s->cs_base);
8130 tcg_gen_helper_0_0(helper_rsm);
8131 gen_eob(s);
8132 break;
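    /* RSM is only valid inside System Management Mode (HF_SMM_MASK).
       helper_rsm reloads the state saved on SMM entry, so the lazy
       condition codes are spilled first and the block is ended. */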
8133 case 0x1b8: /* SSE4.2 popcnt */
8134 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8135 PREFIX_REPZ)
8136 goto illegal_op;
8137 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8138 goto illegal_op;
8139
8140 modrm = ldub_code(s->pc++);
8141 reg = ((modrm >> 3) & 7);
8142
8143 if (s->prefix & PREFIX_DATA)
8144 ot = OT_WORD;
8145 else if (s->dflag != 2)
8146 ot = OT_LONG;
8147 else
8148 ot = OT_QUAD;
8149
8150 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8151 tcg_gen_helper_1_2(helper_popcnt,
8152 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8153 gen_op_mov_reg_T0(ot, reg);
8154
8155 s->cc_op = CC_OP_EFLAGS;
8156 break;
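    /* POPCNT requires a mandatory F3 prefix (with LOCK and F2 absent)
       and the CPUID POPCNT feature bit; operand size follows the usual
       rules (66 prefix: 16 bit, REX.W: 64 bit, otherwise 32 bit). */
#if 0
    /* Illustrative sketch of the value helper_popcnt is expected to
       return for an operand of 'nbits' bits; this is not the actual
       helper implementation. */
    static target_ulong popcnt_sketch(target_ulong val, int nbits)
    {
        target_ulong cnt = 0;
        while (nbits-- > 0) {   /* count the set bits one at a time */
            cnt += val & 1;
            val >>= 1;
        }
        return cnt;
    }
#endif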
8157 case 0x10e ... 0x10f:
8158 /* 3DNow! instructions, ignore prefixes */
8159 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
8160 case 0x110 ... 0x117:
8161 case 0x128 ... 0x12f:
8162 case 0x138 ... 0x13a:
8163 case 0x150 ... 0x177:
8164 case 0x17c ... 0x17f:
8165 case 0x1c2:
8166 case 0x1c4 ... 0x1c6:
8167 case 0x1d0 ... 0x1fe:
8168 gen_sse(s, b, pc_start, rex_r);
8169 break;
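    /* Everything in these opcode ranges is handed to gen_sse(), the
       common decoder for the MMX/3DNow!/SSE-family operations. The
       3DNow! entry (0F 0E/0F) first strips the repeat and operand-size
       prefixes, which carry no meaning there, and falls through. */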
8170 default:
8171 goto illegal_op;
8172 }
8173 /* lock generation */
8174 if (s->prefix & PREFIX_LOCK)
8175 tcg_gen_helper_0_0(helper_unlock);
8176 return s->pc;
8177 illegal_op:
8178 if (s->prefix & PREFIX_LOCK)
8179 tcg_gen_helper_0_0(helper_unlock);
8180 /* XXX: ensure that no lock was generated */
8181 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8182 return s->pc;
8183}
8184
8185void optimize_flags_init(void)
8186{
8187#ifndef VBOX
8188#if TCG_TARGET_REG_BITS == 32
8189 assert(sizeof(CCTable) == (1 << 3));
8190#else
8191 assert(sizeof(CCTable) == (1 << 4));
8192#endif
8193#endif
8194 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8195 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8196 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8197 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8198 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8199 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8200 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8201 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8202 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8203
8204 /* register helpers */
8205
8206#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8207#include "helper.h"
8208}
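/* The re-inclusion of helper.h above is an x-macro trick: with
   DEF_HELPER redefined, a declaration like
       DEF_HELPER(void, helper_unlock, (void))
   expands to
       tcg_register_helper(helper_unlock, "helper_unlock");
   registering every helper with TCG by name. */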
8209
8210/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8211 basic block 'tb'. If search_pc is TRUE, also generate PC
8212 information for each intermediate instruction. */
8213#ifndef VBOX
8214static inline void gen_intermediate_code_internal(CPUState *env,
8215#else /* VBOX */
8216DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8217#endif /* VBOX */
8218 TranslationBlock *tb,
8219 int search_pc)
8220{
8221 DisasContext dc1, *dc = &dc1;
8222 target_ulong pc_ptr;
8223 uint16_t *gen_opc_end;
8224 int j, lj, cflags;
8225 uint64_t flags;
8226 target_ulong pc_start;
8227 target_ulong cs_base;
8228 int num_insns;
8229 int max_insns;
8230
8231 /* generate intermediate code */
8232 pc_start = tb->pc;
8233 cs_base = tb->cs_base;
8234 flags = tb->flags;
8235 cflags = tb->cflags;
8236
8237 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8238 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8239 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8240 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8241 dc->f_st = 0;
8242 dc->vm86 = (flags >> VM_SHIFT) & 1;
8243#ifdef VBOX_WITH_CALL_RECORD
8244 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8245 if ( !(env->state & CPU_RAW_RING0)
8246 && (env->cr[0] & CR0_PG_MASK)
8247 && !(env->eflags & X86_EFL_IF)
8248 && dc->code32)
8249 dc->record_call = 1;
8250 else
8251 dc->record_call = 0;
8252#endif
8253 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8254 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8255 dc->tf = (flags >> TF_SHIFT) & 1;
8256 dc->singlestep_enabled = env->singlestep_enabled;
8257 dc->cc_op = CC_OP_DYNAMIC;
8258 dc->cs_base = cs_base;
8259 dc->tb = tb;
8260 dc->popl_esp_hack = 0;
8261 /* select memory access functions */
8262 dc->mem_index = 0;
8263 if (flags & HF_SOFTMMU_MASK) {
8264 if (dc->cpl == 3)
8265 dc->mem_index = 2 * 4;
8266 else
8267 dc->mem_index = 1 * 4;
8268 }
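    /* mem_index selects the softmmu access function set: 0 without a
       softmmu, otherwise 1 (kernel mode) or 2 (user mode), scaled by 4
       because the memory ops are indexed as size + mem_index (cf. the
       OT_LONG + s->mem_index uses above). */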
8269 dc->cpuid_features = env->cpuid_features;
8270 dc->cpuid_ext_features = env->cpuid_ext_features;
8271 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8272 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8273#ifdef TARGET_X86_64
8274 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8275 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8276#endif
8277 dc->flags = flags;
8278 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8279 (flags & HF_INHIBIT_IRQ_MASK)
8280#ifndef CONFIG_SOFTMMU
8281 || (flags & HF_SOFTMMU_MASK)
8282#endif
8283 );
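    /* jmp_opt allows direct block chaining; it presumably has to stay
       off under TF, debugger single step or inhibited IRQs, where
       control must return to the main loop after every block. */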
8284#if 0
8285 /* check addseg logic */
8286 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8287 printf("ERROR addseg\n");
8288#endif
8289
8290 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8291 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8292 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8293 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8294
8295 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8296 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8297 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8298 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8299 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8300 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8301 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8302 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8303 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8304
8305 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8306
8307 dc->is_jmp = DISAS_NEXT;
8308 pc_ptr = pc_start;
8309 lj = -1;
8310 num_insns = 0;
8311 max_insns = tb->cflags & CF_COUNT_MASK;
8312 if (max_insns == 0)
8313 max_insns = CF_COUNT_MASK;
8314
8315 gen_icount_start();
8316 for(;;) {
8317 if (env->nb_breakpoints > 0) {
8318 for(j = 0; j < env->nb_breakpoints; j++) {
8319 if (env->breakpoints[j] == pc_ptr) {
8320 gen_debug(dc, pc_ptr - dc->cs_base);
8321 break;
8322 }
8323 }
8324 }
8325 if (search_pc) {
8326 j = gen_opc_ptr - gen_opc_buf;
8327 if (lj < j) {
8328 lj++;
8329 while (lj < j)
8330 gen_opc_instr_start[lj++] = 0;
8331 }
8332 gen_opc_pc[lj] = pc_ptr;
8333 gen_opc_cc_op[lj] = dc->cc_op;
8334 gen_opc_instr_start[lj] = 1;
8335 gen_opc_icount[lj] = num_insns;
8336 }
8337 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8338 gen_io_start();
8339
8340 pc_ptr = disas_insn(dc, pc_ptr);
8341 num_insns++;
8342 /* stop translation if indicated */
8343 if (dc->is_jmp)
8344 break;
8345#ifdef VBOX
8346#ifdef DEBUG
8347/*
8348 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8349 {
8350 //should never happen as the jump to the patch code terminates the translation block
8351 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8352 }
8353*/
8354#endif
8355 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8356 {
8357 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8358 gen_jmp_im(pc_ptr - dc->cs_base);
8359 gen_eob(dc);
8360 break;
8361 }
8362#endif /* VBOX */
8363
8364 /* in single-step mode, we generate only one instruction and
8365 then raise an exception */
8366 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8367 the flag and abort the translation to give the irqs a
8368 chance to happen */
8369 if (dc->tf || dc->singlestep_enabled ||
8370 (flags & HF_INHIBIT_IRQ_MASK)) {
8371 gen_jmp_im(pc_ptr - dc->cs_base);
8372 gen_eob(dc);
8373 break;
8374 }
8375 /* also stop generation if the translation is too long */
8376 if (gen_opc_ptr >= gen_opc_end ||
8377 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8378 num_insns >= max_insns) {
8379 gen_jmp_im(pc_ptr - dc->cs_base);
8380 gen_eob(dc);
8381 break;
8382 }
8383 }
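    /* The loop above stops on: an instruction that ends the block by
       itself (dc->is_jmp), VBox single-instruction emulation, TF or
       single-step or inhibited IRQs, and finally the resource limits,
       i.e. a full opcode buffer, a PC within 32 bytes of the page end,
       or the icount budget in max_insns. */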
8384 if (tb->cflags & CF_LAST_IO)
8385 gen_io_end();
8386 gen_icount_end(tb, num_insns);
8387 *gen_opc_ptr = INDEX_op_end;
8388 /* make sure the last values are filled in */
8389 if (search_pc) {
8390 j = gen_opc_ptr - gen_opc_buf;
8391 lj++;
8392 while (lj <= j)
8393 gen_opc_instr_start[lj++] = 0;
8394 }
8395
8396#ifdef DEBUG_DISAS
8397 if (loglevel & CPU_LOG_TB_CPU) {
8398 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8399 }
8400 if (loglevel & CPU_LOG_TB_IN_ASM) {
8401 int disas_flags;
8402 fprintf(logfile, "----------------\n");
8403 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8404#ifdef TARGET_X86_64
8405 if (dc->code64)
8406 disas_flags = 2;
8407 else
8408#endif
8409 disas_flags = !dc->code32;
8410 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8411 fprintf(logfile, "\n");
8412 }
8413#endif
8414
8415 if (!search_pc) {
8416 tb->size = pc_ptr - pc_start;
8417 tb->icount = num_insns;
8418 }
8419}
8420
8421void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8422{
8423 gen_intermediate_code_internal(env, tb, 0);
8424}
8425
8426void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8427{
8428 gen_intermediate_code_internal(env, tb, 1);
8429}
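/* Both entry points share gen_intermediate_code_internal(); with
   search_pc set it additionally records the guest PC and cc_op per
   generated op (gen_opc_pc / gen_opc_cc_op) so that gen_pc_load()
   below can restore precise CPU state after a fault inside a TB. */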
8430
8431void gen_pc_load(CPUState *env, TranslationBlock *tb,
8432 unsigned long searched_pc, int pc_pos, void *puc)
8433{
8434 int cc_op;
8435#ifdef DEBUG_DISAS
8436 if (loglevel & CPU_LOG_TB_OP) {
8437 int i;
8438 fprintf(logfile, "RESTORE:\n");
8439 for(i = 0;i <= pc_pos; i++) {
8440 if (gen_opc_instr_start[i]) {
8441 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8442 }
8443 }
8444 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8445 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8446 (uint32_t)tb->cs_base);
8447 }
8448#endif
8449 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8450 cc_op = gen_opc_cc_op[pc_pos];
8451 if (cc_op != CC_OP_DYNAMIC)
8452 env->cc_op = cc_op;
8453}