VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@ 16894

Last change on this file since 16894 was 16455, checked in by vboxsync, 16 years ago

REM: segment forced sync, cleanups

  • Property svn:eol-style set to native
File size: 276.2 KB
 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
45#define PREFIX_REPZ 0x01
46#define PREFIX_REPNZ 0x02
47#define PREFIX_LOCK 0x04
48#define PREFIX_DATA 0x08
49#define PREFIX_ADR 0x10
50
51#ifdef TARGET_X86_64
52#define X86_64_ONLY(x) x
53#ifndef VBOX
54#define X86_64_DEF(x...) x
55#else
56#define X86_64_DEF(x...) x
57#endif
58#define CODE64(s) ((s)->code64)
59#define REX_X(s) ((s)->rex_x)
60#define REX_B(s) ((s)->rex_b)
61/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
62#if 1
63#define BUGGY_64(x) NULL
64#endif
65#else
66#define X86_64_ONLY(x) NULL
67#ifndef VBOX
68#define X86_64_DEF(x...)
69#else
70#define X86_64_DEF(x)
71#endif
72#define CODE64(s) 0
73#define REX_X(s) 0
74#define REX_B(s) 0
75#endif
76
77//#define MACRO_TEST 1
78
79/* global register indexes */
80static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
81/* local temps */
82static TCGv cpu_T[2], cpu_T3;
83/* local register indexes (only used inside old micro ops) */
84static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
85static TCGv cpu_tmp5, cpu_tmp6;
86
87#include "gen-icount.h"
88
89#ifdef TARGET_X86_64
90static int x86_64_hregs;
91#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
97uint8_t ldub_code_raw(target_ulong pc)
98{
99 uint8_t b;
100
101 if (!remR3GetOpcode(cpu_single_env, pc, &b))
102 b = ldub_code(pc);
103 return b;
104}
105#define ldub_code(a) ldub_code_raw(a)
106
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
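/* Illustrative sketch, not part of the build: the readers above fetch wide
 * values one byte at a time so that every byte passes through
 * remR3GetOpcode() and patched bytes stay hidden.  The sketch below models
 * the same little-endian composition in standalone C; compose_u32() is a
 * hypothetical name. */
#if 0
#include <stdint.h>
static uint32_t compose_u32(const uint8_t *p)
{
    /* x86 code is little-endian: p[0] is the least significant byte */
    return ((uint32_t)p[3] << 24) | ((uint32_t)p[2] << 16)
         | ((uint32_t)p[1] << 8)  |  (uint32_t)p[0];
}
/* e.g. bytes 78 56 34 12 compose to 0x12345678 */
#endif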
121
122
123typedef struct DisasContext {
124 /* current insn context */
125 int override; /* -1 if no override */
126 int prefix;
127 int aflag, dflag;
128 target_ulong pc; /* pc = eip + cs_base */
129 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
130 static state change (stop translation) */
131 /* current block context */
132 target_ulong cs_base; /* base of CS segment */
133 int pe; /* protected mode */
134 int code32; /* 32 bit code segment */
135#ifdef TARGET_X86_64
136 int lma; /* long mode active */
137 int code64; /* 64 bit code segment */
138 int rex_x, rex_b;
139#endif
140 int ss32; /* 32 bit stack segment */
141 int cc_op; /* current CC operation */
142 int addseg; /* non zero if either DS/ES/SS have a non zero base */
143 int f_st; /* currently unused */
144 int vm86; /* vm86 mode */
145#ifdef VBOX
146 int vme; /* CR4.VME */
147 int pvi; /* CR4.PVI */
148 int record_call; /* record calls for CSAM or not? */
149#endif
150 int cpl;
151 int iopl;
152 int tf; /* TF cpu flag */
153 int singlestep_enabled; /* "hardware" single step enabled */
154 int jmp_opt; /* use direct block chaining for direct jumps */
155 int mem_index; /* select memory access functions */
156 uint64_t flags; /* all execution flags */
157 struct TranslationBlock *tb;
158 int popl_esp_hack; /* for correct popl with esp base handling */
159 int rip_offset; /* only used in x86_64, but left for simplicity */
160 int cpuid_features;
161 int cpuid_ext_features;
162 int cpuid_ext2_features;
163 int cpuid_ext3_features;
164} DisasContext;
165
166static void gen_eob(DisasContext *s);
167static void gen_jmp(DisasContext *s, target_ulong eip);
168static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
169
170#ifdef VBOX
171static void gen_check_external_event(void);
172#endif
173
174/* i386 arith/logic operations */
175enum {
176 OP_ADDL,
177 OP_ORL,
178 OP_ADCL,
179 OP_SBBL,
180 OP_ANDL,
181 OP_SUBL,
182 OP_XORL,
183 OP_CMPL,
184};
185
186/* i386 shift ops */
187enum {
188 OP_ROL,
189 OP_ROR,
190 OP_RCL,
191 OP_RCR,
192 OP_SHL,
193 OP_SHR,
194 OP_SHL1, /* undocumented */
195 OP_SAR = 7,
196};
197
198enum {
199 JCC_O,
200 JCC_B,
201 JCC_Z,
202 JCC_BE,
203 JCC_S,
204 JCC_P,
205 JCC_L,
206 JCC_LE,
207};
208
209/* operand size */
210enum {
211 OT_BYTE = 0,
212 OT_WORD,
213 OT_LONG,
214 OT_QUAD,
215};
216
217enum {
218 /* I386 int registers */
219 OR_EAX, /* MUST be even numbered */
220 OR_ECX,
221 OR_EDX,
222 OR_EBX,
223 OR_ESP,
224 OR_EBP,
225 OR_ESI,
226 OR_EDI,
227
228 OR_TMP0 = 16, /* temporary operand register */
229 OR_TMP1,
230 OR_A0, /* temporary register used when doing address evaluation */
231};
232
233#ifndef VBOX
234static inline void gen_op_movl_T0_0(void)
235#else /* VBOX */
236DECLINLINE(void) gen_op_movl_T0_0(void)
237#endif /* VBOX */
238{
239 tcg_gen_movi_tl(cpu_T[0], 0);
240}
241
242#ifndef VBOX
243static inline void gen_op_movl_T0_im(int32_t val)
244#else /* VBOX */
245DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
246#endif /* VBOX */
247{
248 tcg_gen_movi_tl(cpu_T[0], val);
249}
250
251#ifndef VBOX
252static inline void gen_op_movl_T0_imu(uint32_t val)
253#else /* VBOX */
254DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
255#endif /* VBOX */
256{
257 tcg_gen_movi_tl(cpu_T[0], val);
258}
259
260#ifndef VBOX
261static inline void gen_op_movl_T1_im(int32_t val)
262#else /* VBOX */
263DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
264#endif /* VBOX */
265{
266 tcg_gen_movi_tl(cpu_T[1], val);
267}
268
269#ifndef VBOX
270static inline void gen_op_movl_T1_imu(uint32_t val)
271#else /* VBOX */
272DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
273#endif /* VBOX */
274{
275 tcg_gen_movi_tl(cpu_T[1], val);
276}
277
278#ifndef VBOX
279static inline void gen_op_movl_A0_im(uint32_t val)
280#else /* VBOX */
281DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
282#endif /* VBOX */
283{
284 tcg_gen_movi_tl(cpu_A0, val);
285}
286
287#ifdef TARGET_X86_64
288#ifndef VBOX
289static inline void gen_op_movq_A0_im(int64_t val)
290#else /* VBOX */
291DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
292#endif /* VBOX */
293{
294 tcg_gen_movi_tl(cpu_A0, val);
295}
296#endif
297
298#ifndef VBOX
299static inline void gen_movtl_T0_im(target_ulong val)
300#else /* VBOX */
301DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
302#endif /* VBOX */
303{
304 tcg_gen_movi_tl(cpu_T[0], val);
305}
306
307#ifndef VBOX
308static inline void gen_movtl_T1_im(target_ulong val)
309#else /* VBOX */
310DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
311#endif /* VBOX */
312{
313 tcg_gen_movi_tl(cpu_T[1], val);
314}
315
316#ifndef VBOX
317static inline void gen_op_andl_T0_ffff(void)
318#else /* VBOX */
319DECLINLINE(void) gen_op_andl_T0_ffff(void)
320#endif /* VBOX */
321{
322 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
323}
324
325#ifndef VBOX
326static inline void gen_op_andl_T0_im(uint32_t val)
327#else /* VBOX */
328DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
329#endif /* VBOX */
330{
331 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
332}
333
334#ifndef VBOX
335static inline void gen_op_movl_T0_T1(void)
336#else /* VBOX */
337DECLINLINE(void) gen_op_movl_T0_T1(void)
338#endif /* VBOX */
339{
340 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
341}
342
343#ifndef VBOX
344static inline void gen_op_andl_A0_ffff(void)
345#else /* VBOX */
346DECLINLINE(void) gen_op_andl_A0_ffff(void)
347#endif /* VBOX */
348{
349 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
350}
351
352#ifdef TARGET_X86_64
353
354#define NB_OP_SIZES 4
355
356#else /* !TARGET_X86_64 */
357
358#define NB_OP_SIZES 3
359
360#endif /* !TARGET_X86_64 */
361
362#if defined(WORDS_BIGENDIAN)
363#define REG_B_OFFSET (sizeof(target_ulong) - 1)
364#define REG_H_OFFSET (sizeof(target_ulong) - 2)
365#define REG_W_OFFSET (sizeof(target_ulong) - 2)
366#define REG_L_OFFSET (sizeof(target_ulong) - 4)
367#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
368#else
369#define REG_B_OFFSET 0
370#define REG_H_OFFSET 1
371#define REG_W_OFFSET 0
372#define REG_L_OFFSET 0
373#define REG_LH_OFFSET 4
374#endif
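/* Illustrative sketch, not part of the build: the offsets above locate a
 * sub-register inside the host word that stores a guest register (e.g. AL
 * and AH inside the EAX slot); on little-endian hosts REG_B_OFFSET is 0 and
 * REG_H_OFFSET is 1, on big-endian hosts they count from the top.
 * read_byte_reg() is a hypothetical name. */
#if 0
#include <stdint.h>
#include <string.h>
static uint8_t read_byte_reg(const void *reg_slot, size_t off)
{
    uint8_t b;
    memcpy(&b, (const uint8_t *)reg_slot + off, 1); /* off = REG_B/H_OFFSET */
    return b;
}
#endif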
375
376#ifndef VBOX
377static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
378#else /* VBOX */
379DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
380#endif /* VBOX */
381{
382 switch(ot) {
383 case OT_BYTE:
384 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
385 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
386 } else {
387 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
388 }
389 break;
390 case OT_WORD:
391 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
392 break;
393#ifdef TARGET_X86_64
394 case OT_LONG:
395 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
396 /* high part of register set to zero */
397 tcg_gen_movi_tl(cpu_tmp0, 0);
398 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
399 break;
400 default:
401 case OT_QUAD:
402 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
403 break;
404#else
405 default:
406 case OT_LONG:
407 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
408 break;
409#endif
410 }
411}
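/* Illustrative sketch, not part of the build: for OT_BYTE the classic
 * encoding maps reg 0..3 to AL..BL and reg 4..7 to AH..BH, which is why the
 * second store above uses regs[reg - 4] + REG_H_OFFSET; when a REX prefix
 * is in effect (x86_64_hregs) regs 4..7 address SPL..DIL instead.
 * is_high_byte_reg() is a hypothetical name. */
#if 0
static int is_high_byte_reg(int reg, int rex_prefix_present)
{
    /* AH/CH/DH/BH are only reachable without a REX prefix */
    return reg >= 4 && reg < 8 && !rex_prefix_present;
}
#endif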
412
413#ifndef VBOX
414static inline void gen_op_mov_reg_T0(int ot, int reg)
415#else /* VBOX */
416DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
417#endif /* VBOX */
418{
419 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
420}
421
422#ifndef VBOX
423static inline void gen_op_mov_reg_T1(int ot, int reg)
424#else /* VBOX */
425DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
426#endif /* VBOX */
427{
428 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
429}
430
431#ifndef VBOX
432static inline void gen_op_mov_reg_A0(int size, int reg)
433#else /* VBOX */
434DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
435#endif /* VBOX */
436{
437 switch(size) {
438 case 0:
439 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
440 break;
441#ifdef TARGET_X86_64
442 case 1:
443 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
444 /* high part of register set to zero */
445 tcg_gen_movi_tl(cpu_tmp0, 0);
446 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
447 break;
448 default:
449 case 2:
450 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
451 break;
452#else
453 default:
454 case 1:
455 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
456 break;
457#endif
458 }
459}
460
461#ifndef VBOX
462static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
463#else /* VBOX */
464DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
465#endif /* VBOX */
466{
467 switch(ot) {
468 case OT_BYTE:
469 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
470#ifndef VBOX
471 goto std_case;
472#else
473 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
474#endif
475 } else {
476 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
477 }
478 break;
479 default:
480 std_case:
481 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
482 break;
483 }
484}
485
486#ifndef VBOX
487static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
488#else /* VBOX */
489DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
490#endif /* VBOX */
491{
492 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
493}
494
495#ifndef VBOX
496static inline void gen_op_movl_A0_reg(int reg)
497#else /* VBOX */
498DECLINLINE(void) gen_op_movl_A0_reg(int reg)
499#endif /* VBOX */
500{
501 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
502}
503
504#ifndef VBOX
505static inline void gen_op_addl_A0_im(int32_t val)
506#else /* VBOX */
507DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
508#endif /* VBOX */
509{
510 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
511#ifdef TARGET_X86_64
512 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
513#endif
514}
515
516#ifdef TARGET_X86_64
517#ifndef VBOX
518static inline void gen_op_addq_A0_im(int64_t val)
519#else /* VBOX */
520DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
521#endif /* VBOX */
522{
523 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
524}
525#endif
526
527static void gen_add_A0_im(DisasContext *s, int val)
528{
529#ifdef TARGET_X86_64
530 if (CODE64(s))
531 gen_op_addq_A0_im(val);
532 else
533#endif
534 gen_op_addl_A0_im(val);
535}
536
537#ifndef VBOX
538static inline void gen_op_addl_T0_T1(void)
539#else /* VBOX */
540DECLINLINE(void) gen_op_addl_T0_T1(void)
541#endif /* VBOX */
542{
543 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
544}
545
546#ifndef VBOX
547static inline void gen_op_jmp_T0(void)
548#else /* VBOX */
549DECLINLINE(void) gen_op_jmp_T0(void)
550#endif /* VBOX */
551{
552 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
553}
554
555#ifndef VBOX
556static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
557#else /* VBOX */
558DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
559#endif /* VBOX */
560{
561 switch(size) {
562 case 0:
563 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
564 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
565 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
566 break;
567 case 1:
568 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
569 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
570#ifdef TARGET_X86_64
571 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
572#endif
573 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
574 break;
575#ifdef TARGET_X86_64
576 case 2:
577 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
578 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
579 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
580 break;
581#endif
582 }
583}
584
585#ifndef VBOX
586static inline void gen_op_add_reg_T0(int size, int reg)
587#else /* VBOX */
588DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
589#endif /* VBOX */
590{
591 switch(size) {
592 case 0:
593 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
594 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
595 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
596 break;
597 case 1:
598 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
599 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
600#ifdef TARGET_X86_64
601 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
602#endif
603 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
604 break;
605#ifdef TARGET_X86_64
606 case 2:
607 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
608 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
609 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
610 break;
611#endif
612 }
613}
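/* Illustrative sketch, not part of the build: in the two helpers above,
 * size 0/1/2 selects a 16/32/64-bit register update, and the 0xffffffff
 * mask in the size 1 case models the x86-64 rule that a 32-bit register
 * write zero-extends into the upper half.  write_reg32() is a hypothetical
 * name. */
#if 0
#include <stdint.h>
static uint64_t write_reg32(uint64_t old_reg, uint32_t val)
{
    (void)old_reg;        /* the upper 32 bits are discarded, not preserved */
    return (uint64_t)val; /* e.g. writing 1 to the low half clears the top */
}
#endif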
614
615#ifndef VBOX
616static inline void gen_op_set_cc_op(int32_t val)
617#else /* VBOX */
618DECLINLINE(void) gen_op_set_cc_op(int32_t val)
619#endif /* VBOX */
620{
621 tcg_gen_movi_i32(cpu_cc_op, val);
622}
623
624#ifndef VBOX
625static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
626#else /* VBOX */
627DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
628#endif /* VBOX */
629{
630 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
631 if (shift != 0)
632 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
633 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
634#ifdef TARGET_X86_64
635 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
636#endif
637}
638#ifdef VBOX
639DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
640{
641 /* Segments don't seem to get out of sync; if they in fact do, enable the code below. */
642#ifdef FORCE_SEGMENT_SYNC
643#if 1
644 TCGv t0;
645
646 /* Given the poor quality of the TCG optimizer, it is better to call the helper directly */
647 t0 = tcg_temp_local_new(TCG_TYPE_TL);
648 tcg_gen_movi_tl(t0, reg);
649 tcg_gen_helper_0_1(helper_sync_seg, t0);
650 tcg_temp_free(t0);
651#else
652 /* Our segments could be outdated; check the newselector field to see whether an update is really needed */
653 int skip_label;
654 TCGv t0, a0;
655
656 /* For segments other than GS this check is a waste of time; TCG is also unable to cope with this code
657 for data/stack segments, as it expects cpu_T[0] to be live */
658 if (reg != R_GS)
659 return;
660
661 if (keepA0)
662 {
663 /* we need to store old cpu_A0 */
664 a0 = tcg_temp_local_new(TCG_TYPE_TL);
665 tcg_gen_mov_tl(a0, cpu_A0);
666 }
667
668 skip_label = gen_new_label();
669 t0 = tcg_temp_local_new(TCG_TYPE_TL);
670
671 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
672 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
673 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
674 tcg_gen_andi_tl(t0, t0, VM_MASK);
675 tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
676 tcg_gen_movi_tl(t0, reg);
677
678 tcg_gen_helper_0_1(helper_sync_seg, t0);
679
680 tcg_temp_free(t0);
681
682 gen_set_label(skip_label);
683 if (keepA0)
684 {
685 tcg_gen_mov_tl(cpu_A0, a0);
686 tcg_temp_free(a0);
687 }
688#endif /* 0 */
689#endif /* FORCE_SEGMENT_SYNC */
690}
691#endif
692
693#ifndef VBOX
694static inline void gen_op_movl_A0_seg(int reg)
695#else /* VBOX */
696DECLINLINE(void) gen_op_movl_A0_seg(int reg)
697#endif /* VBOX */
698{
699#ifdef VBOX
700 gen_op_seg_check(reg, false);
701#endif
702 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
703}
704
705#ifndef VBOX
706static inline void gen_op_addl_A0_seg(int reg)
707#else /* VBOX */
708DECLINLINE(void) gen_op_addl_A0_seg(int reg)
709#endif /* VBOX */
710{
711#ifdef VBOX
712 gen_op_seg_check(reg, true);
713#endif
714 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
715 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
716#ifdef TARGET_X86_64
717 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
718#endif
719}
720
721#ifdef TARGET_X86_64
722#ifndef VBOX
723static inline void gen_op_movq_A0_seg(int reg)
724#else /* VBOX */
725DECLINLINE(void) gen_op_movq_A0_seg(int reg)
726#endif /* VBOX */
727{
728#ifdef VBOX
729 gen_op_seg_check(reg, false);
730#endif
731 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
732}
733
734#ifndef VBOX
735static inline void gen_op_addq_A0_seg(int reg)
736#else /* VBOX */
737DECLINLINE(void) gen_op_addq_A0_seg(int reg)
738#endif /* VBOX */
739{
740#ifdef VBOX
741 gen_op_seg_check(reg, true);
742#endif
743 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
744 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
745}
746
747#ifndef VBOX
748static inline void gen_op_movq_A0_reg(int reg)
749#else /* VBOX */
750DECLINLINE(void) gen_op_movq_A0_reg(int reg)
751#endif /* VBOX */
752{
753 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
754}
755
756#ifndef VBOX
757static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
758#else /* VBOX */
759DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
760#endif /* VBOX */
761{
762 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
763 if (shift != 0)
764 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
765 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
766}
767#endif
768
769#ifndef VBOX
770static inline void gen_op_lds_T0_A0(int idx)
771#else /* VBOX */
772DECLINLINE(void) gen_op_lds_T0_A0(int idx)
773#endif /* VBOX */
774{
775 int mem_index = (idx >> 2) - 1;
776 switch(idx & 3) {
777 case 0:
778 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
779 break;
780 case 1:
781 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
782 break;
783 default:
784 case 2:
785 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
786 break;
787 }
788}
789
790#ifndef VBOX
791static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
792#else /* VBOX */
793DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
794#endif /* VBOX */
795{
796 int mem_index = (idx >> 2) - 1;
797 switch(idx & 3) {
798 case 0:
799 tcg_gen_qemu_ld8u(t0, a0, mem_index);
800 break;
801 case 1:
802 tcg_gen_qemu_ld16u(t0, a0, mem_index);
803 break;
804 case 2:
805 tcg_gen_qemu_ld32u(t0, a0, mem_index);
806 break;
807 default:
808 case 3:
809 tcg_gen_qemu_ld64(t0, a0, mem_index);
810 break;
811 }
812}
813
814/* XXX: always use ldu or lds */
815#ifndef VBOX
816static inline void gen_op_ld_T0_A0(int idx)
817#else /* VBOX */
818DECLINLINE(void) gen_op_ld_T0_A0(int idx)
819#endif /* VBOX */
820{
821 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
822}
823
824#ifndef VBOX
825static inline void gen_op_ldu_T0_A0(int idx)
826#else /* VBOX */
827DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
828#endif /* VBOX */
829{
830 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
831}
832
833#ifndef VBOX
834static inline void gen_op_ld_T1_A0(int idx)
835#else /* VBOX */
836DECLINLINE(void) gen_op_ld_T1_A0(int idx)
837#endif /* VBOX */
838{
839 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
840}
841
842#ifndef VBOX
843static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
844#else /* VBOX */
845DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
846#endif /* VBOX */
847{
848 int mem_index = (idx >> 2) - 1;
849 switch(idx & 3) {
850 case 0:
851 tcg_gen_qemu_st8(t0, a0, mem_index);
852 break;
853 case 1:
854 tcg_gen_qemu_st16(t0, a0, mem_index);
855 break;
856 case 2:
857 tcg_gen_qemu_st32(t0, a0, mem_index);
858 break;
859 default:
860 case 3:
861 tcg_gen_qemu_st64(t0, a0, mem_index);
862 break;
863 }
864}
865
866#ifndef VBOX
867static inline void gen_op_st_T0_A0(int idx)
868#else /* VBOX */
869DECLINLINE(void) gen_op_st_T0_A0(int idx)
870#endif /* VBOX */
871{
872 gen_op_st_v(idx, cpu_T[0], cpu_A0);
873}
874
875#ifndef VBOX
876static inline void gen_op_st_T1_A0(int idx)
877#else /* VBOX */
878DECLINLINE(void) gen_op_st_T1_A0(int idx)
879#endif /* VBOX */
880{
881 gen_op_st_v(idx, cpu_T[1], cpu_A0);
882}
883
884#ifdef VBOX
885static void gen_check_external_event(void)
886{
887#if 1
888 /** @todo Once TCG codegen improves, we may want to use the version
889 from the #else branch below */
890 tcg_gen_helper_0_0(helper_check_external_event);
891#else
892 int skip_label;
893 TCGv t0;
894
895 skip_label = gen_new_label();
896 t0 = tcg_temp_local_new(TCG_TYPE_TL);
897 /* t0 = cpu_tmp0; */
898
899 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
900 /* Keep in sync with helper_check_external_event() */
901 tcg_gen_andi_tl(t0, t0,
902 CPU_INTERRUPT_EXTERNAL_EXIT
903 | CPU_INTERRUPT_EXTERNAL_TIMER
904 | CPU_INTERRUPT_EXTERNAL_DMA
905 | CPU_INTERRUPT_EXTERNAL_HARD);
906 /** @todo: predict branch as taken */
907 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
908 tcg_temp_free(t0);
909
910 tcg_gen_helper_0_0(helper_check_external_event);
911
912 gen_set_label(skip_label);
913#endif
914}
915
916static void gen_check_external_event2(void)
917{
918 tcg_gen_helper_0_0(helper_check_external_event);
919}
920
921#endif
922
923#ifndef VBOX
924static inline void gen_jmp_im(target_ulong pc)
925#else /* VBOX */
926DECLINLINE(void) gen_jmp_im(target_ulong pc)
927#endif /* VBOX */
928{
929 tcg_gen_movi_tl(cpu_tmp0, pc);
930 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
931}
932
933#ifdef VBOX
934DECLINLINE(void) gen_update_eip(target_ulong pc)
935{
936 gen_jmp_im(pc);
937#ifdef VBOX_DUMP_STATE
938 tcg_gen_helper_0_0(helper_dump_state);
939#endif
940}
941
942#endif
943
944#ifndef VBOX
945static inline void gen_string_movl_A0_ESI(DisasContext *s)
946#else /* VBOX */
947DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
948#endif /* VBOX */
949{
950 int override;
951
952 override = s->override;
953#ifdef TARGET_X86_64
954 if (s->aflag == 2) {
955 if (override >= 0) {
956 gen_op_movq_A0_seg(override);
957 gen_op_addq_A0_reg_sN(0, R_ESI);
958 } else {
959 gen_op_movq_A0_reg(R_ESI);
960 }
961 } else
962#endif
963 if (s->aflag) {
964 /* 32 bit address */
965 if (s->addseg && override < 0)
966 override = R_DS;
967 if (override >= 0) {
968 gen_op_movl_A0_seg(override);
969 gen_op_addl_A0_reg_sN(0, R_ESI);
970 } else {
971 gen_op_movl_A0_reg(R_ESI);
972 }
973 } else {
974 /* 16 bit address, always override */
975 if (override < 0)
976 override = R_DS;
977 gen_op_movl_A0_reg(R_ESI);
978 gen_op_andl_A0_ffff();
979 gen_op_addl_A0_seg(override);
980 }
981}
982
983#ifndef VBOX
984static inline void gen_string_movl_A0_EDI(DisasContext *s)
985#else /* VBOX */
986DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
987#endif /* VBOX */
988{
989#ifdef TARGET_X86_64
990 if (s->aflag == 2) {
991 gen_op_movq_A0_reg(R_EDI);
992 } else
993#endif
994 if (s->aflag) {
995 if (s->addseg) {
996 gen_op_movl_A0_seg(R_ES);
997 gen_op_addl_A0_reg_sN(0, R_EDI);
998 } else {
999 gen_op_movl_A0_reg(R_EDI);
1000 }
1001 } else {
1002 gen_op_movl_A0_reg(R_EDI);
1003 gen_op_andl_A0_ffff();
1004 gen_op_addl_A0_seg(R_ES);
1005 }
1006}
1007
1008#ifndef VBOX
1009static inline void gen_op_movl_T0_Dshift(int ot)
1010#else /* VBOX */
1011DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
1012#endif /* VBOX */
1013{
1014 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
1015 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
1016}
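/* Illustrative sketch, not part of the build: env->df holds the direction
 * flag as +1 or -1, so shifting it left by ot yields the per-iteration
 * string increment.  The sketch uses a multiply, which is what the
 * two's-complement TCG shift computes; dshift() is a hypothetical name. */
#if 0
static long dshift(long df, int ot)
{
    /* df = +1, ot = OT_LONG (2): +1 * 4 = +4 (step up by 4 bytes)
       df = -1, ot = OT_WORD (1): -1 * 2 = -2 (step down by 2 bytes) */
    return df * (1L << ot);
}
#endif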
1017
1018static void gen_extu(int ot, TCGv reg)
1019{
1020 switch(ot) {
1021 case OT_BYTE:
1022 tcg_gen_ext8u_tl(reg, reg);
1023 break;
1024 case OT_WORD:
1025 tcg_gen_ext16u_tl(reg, reg);
1026 break;
1027 case OT_LONG:
1028 tcg_gen_ext32u_tl(reg, reg);
1029 break;
1030 default:
1031 break;
1032 }
1033}
1034
1035static void gen_exts(int ot, TCGv reg)
1036{
1037 switch(ot) {
1038 case OT_BYTE:
1039 tcg_gen_ext8s_tl(reg, reg);
1040 break;
1041 case OT_WORD:
1042 tcg_gen_ext16s_tl(reg, reg);
1043 break;
1044 case OT_LONG:
1045 tcg_gen_ext32s_tl(reg, reg);
1046 break;
1047 default:
1048 break;
1049 }
1050}
1051
1052#ifndef VBOX
1053static inline void gen_op_jnz_ecx(int size, int label1)
1054#else /* VBOX */
1055DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
1056#endif /* VBOX */
1057{
1058 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1059 gen_extu(size + 1, cpu_tmp0);
1060 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
1061}
1062
1063#ifndef VBOX
1064static inline void gen_op_jz_ecx(int size, int label1)
1065#else /* VBOX */
1066DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
1067#endif /* VBOX */
1068{
1069 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1070 gen_extu(size + 1, cpu_tmp0);
1071 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1072}
1073
1074static void *helper_in_func[3] = {
1075 helper_inb,
1076 helper_inw,
1077 helper_inl,
1078};
1079
1080static void *helper_out_func[3] = {
1081 helper_outb,
1082 helper_outw,
1083 helper_outl,
1084};
1085
1086static void *gen_check_io_func[3] = {
1087 helper_check_iob,
1088 helper_check_iow,
1089 helper_check_iol,
1090};
1091
1092static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
1093 uint32_t svm_flags)
1094{
1095 int state_saved;
1096 target_ulong next_eip;
1097
1098 state_saved = 0;
1099 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1100 if (s->cc_op != CC_OP_DYNAMIC)
1101 gen_op_set_cc_op(s->cc_op);
1102 gen_jmp_im(cur_eip);
1103 state_saved = 1;
1104 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1105 tcg_gen_helper_0_1(gen_check_io_func[ot],
1106 cpu_tmp2_i32);
1107 }
1108 if (s->flags & HF_SVMI_MASK) {
1109 if (!state_saved) {
1110 if (s->cc_op != CC_OP_DYNAMIC)
1111 gen_op_set_cc_op(s->cc_op);
1112 gen_jmp_im(cur_eip);
1113 state_saved = 1;
1114 }
1115 svm_flags |= (1 << (4 + ot));
1116 next_eip = s->pc - s->cs_base;
1117 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1118 tcg_gen_helper_0_3(helper_svm_check_io,
1119 cpu_tmp2_i32,
1120 tcg_const_i32(svm_flags),
1121 tcg_const_i32(next_eip - cur_eip));
1122 }
1123}
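/* Illustrative sketch, not part of the build: the permission helper is only
 * emitted when the TSS I/O bitmap must be consulted, i.e. in protected mode
 * with CPL > IOPL, or in vm86 mode; eip and cc_op are flushed first so a
 * #GP raised inside the helper sees a consistent CPU state.
 * needs_io_bitmap_check() is a hypothetical name. */
#if 0
static int needs_io_bitmap_check(int pe, int cpl, int iopl, int vm86)
{
    return pe && (cpl > iopl || vm86);
}
#endif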
1124
1125#ifndef VBOX
1126static inline void gen_movs(DisasContext *s, int ot)
1127#else /* VBOX */
1128DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1129#endif /* VBOX */
1130{
1131 gen_string_movl_A0_ESI(s);
1132 gen_op_ld_T0_A0(ot + s->mem_index);
1133 gen_string_movl_A0_EDI(s);
1134 gen_op_st_T0_A0(ot + s->mem_index);
1135 gen_op_movl_T0_Dshift(ot);
1136 gen_op_add_reg_T0(s->aflag, R_ESI);
1137 gen_op_add_reg_T0(s->aflag, R_EDI);
1138}
1139
1140#ifndef VBOX
1141static inline void gen_update_cc_op(DisasContext *s)
1142#else /* VBOX */
1143DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1144#endif /* VBOX */
1145{
1146 if (s->cc_op != CC_OP_DYNAMIC) {
1147 gen_op_set_cc_op(s->cc_op);
1148 s->cc_op = CC_OP_DYNAMIC;
1149 }
1150}
1151
1152static void gen_op_update1_cc(void)
1153{
1154 tcg_gen_discard_tl(cpu_cc_src);
1155 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1156}
1157
1158static void gen_op_update2_cc(void)
1159{
1160 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1161 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1162}
1163
1164#ifndef VBOX
1165static inline void gen_op_cmpl_T0_T1_cc(void)
1166#else /* VBOX */
1167DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1168#endif /* VBOX */
1169{
1170 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1171 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1172}
1173
1174#ifndef VBOX
1175static inline void gen_op_testl_T0_T1_cc(void)
1176#else /* VBOX */
1177DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1178#endif /* VBOX */
1179{
1180 tcg_gen_discard_tl(cpu_cc_src);
1181 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1182}
1183
1184static void gen_op_update_neg_cc(void)
1185{
1186 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1187 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1188}
1189
1190/* compute eflags.C to reg */
1191static void gen_compute_eflags_c(TCGv reg)
1192{
1193#if TCG_TARGET_REG_BITS == 32
1194 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1195 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1196 (long)cc_table + offsetof(CCTable, compute_c));
1197 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1198 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1199 1, &cpu_tmp2_i32, 0, NULL);
1200#else
1201 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1202 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1203 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1204 (long)cc_table + offsetof(CCTable, compute_c));
1205 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1206 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1207 1, &cpu_tmp2_i32, 0, NULL);
1208#endif
1209 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1210}
1211
1212/* compute all eflags to cc_src */
1213static void gen_compute_eflags(TCGv reg)
1214{
1215#if TCG_TARGET_REG_BITS == 32
1216 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1217 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1218 (long)cc_table + offsetof(CCTable, compute_all));
1219 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1220 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1221 1, &cpu_tmp2_i32, 0, NULL);
1222#else
1223 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1224 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1225 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1226 (long)cc_table + offsetof(CCTable, compute_all));
1227 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1228 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1229 1, &cpu_tmp2_i32, 0, NULL);
1230#endif
1231 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1232}
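/* Illustrative sketch, not part of the build: both helpers above index
 * cc_table[cc_op] and call one of its two function pointers indirectly.
 * The shift turns cc_op into a byte offset: a CCTable holds two function
 * pointers, so an entry is 8 bytes on 32-bit hosts (shift by 3) and 16
 * bytes on 64-bit hosts (shift by 4).  CCTableModel/carry_of() are
 * hypothetical names standing in for the real CCTable defined elsewhere. */
#if 0
typedef struct {
    int (*compute_all)(void); /* materialize every eflags bit */
    int (*compute_c)(void);   /* materialize only the carry flag */
} CCTableModel;
static int carry_of(const CCTableModel *table, int cc_op)
{
    return table[cc_op].compute_c(); /* == *(base + (cc_op << shift)) */
}
#endif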
1233
1234#ifndef VBOX
1235static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1236#else /* VBOX */
1237DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1238#endif /* VBOX */
1239{
1240 if (s->cc_op != CC_OP_DYNAMIC)
1241 gen_op_set_cc_op(s->cc_op);
1242 switch(jcc_op) {
1243 case JCC_O:
1244 gen_compute_eflags(cpu_T[0]);
1245 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1246 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1247 break;
1248 case JCC_B:
1249 gen_compute_eflags_c(cpu_T[0]);
1250 break;
1251 case JCC_Z:
1252 gen_compute_eflags(cpu_T[0]);
1253 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1254 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1255 break;
1256 case JCC_BE:
1257 gen_compute_eflags(cpu_tmp0);
1258 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1259 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1260 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1261 break;
1262 case JCC_S:
1263 gen_compute_eflags(cpu_T[0]);
1264 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1265 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1266 break;
1267 case JCC_P:
1268 gen_compute_eflags(cpu_T[0]);
1269 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1270 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1271 break;
1272 case JCC_L:
1273 gen_compute_eflags(cpu_tmp0);
1274 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1275 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1276 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1277 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1278 break;
1279 default:
1280 case JCC_LE:
1281 gen_compute_eflags(cpu_tmp0);
1282 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1283 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1284 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1285 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1286 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1287 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1288 break;
1289 }
1290}
1291
1292/* return true if setcc_slow is not needed (WARNING: must be kept in
1293 sync with gen_jcc1) */
1294static int is_fast_jcc_case(DisasContext *s, int b)
1295{
1296 int jcc_op;
1297 jcc_op = (b >> 1) & 7;
1298 switch(s->cc_op) {
1299 /* we optimize the cmp/jcc case */
1300 case CC_OP_SUBB:
1301 case CC_OP_SUBW:
1302 case CC_OP_SUBL:
1303 case CC_OP_SUBQ:
1304 if (jcc_op == JCC_O || jcc_op == JCC_P)
1305 goto slow_jcc;
1306 break;
1307
1308 /* some jumps are easy to compute */
1309 case CC_OP_ADDB:
1310 case CC_OP_ADDW:
1311 case CC_OP_ADDL:
1312 case CC_OP_ADDQ:
1313
1314 case CC_OP_LOGICB:
1315 case CC_OP_LOGICW:
1316 case CC_OP_LOGICL:
1317 case CC_OP_LOGICQ:
1318
1319 case CC_OP_INCB:
1320 case CC_OP_INCW:
1321 case CC_OP_INCL:
1322 case CC_OP_INCQ:
1323
1324 case CC_OP_DECB:
1325 case CC_OP_DECW:
1326 case CC_OP_DECL:
1327 case CC_OP_DECQ:
1328
1329 case CC_OP_SHLB:
1330 case CC_OP_SHLW:
1331 case CC_OP_SHLL:
1332 case CC_OP_SHLQ:
1333 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1334 goto slow_jcc;
1335 break;
1336 default:
1337 slow_jcc:
1338 return 0;
1339 }
1340 return 1;
1341}
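/* Illustrative sketch, not part of the build: 'b' carries the condition
 * nibble of the Jcc/SETcc opcode; bit 0 selects the inverted condition and
 * bits 1..3 select the base condition JCC_O..JCC_LE.  decode_jcc() is a
 * hypothetical name. */
#if 0
static void decode_jcc(int b, int *inv, int *jcc_op)
{
    *inv = b & 1;           /* e.g. JNZ (opcode 0x75): b & 1 = 1 */
    *jcc_op = (b >> 1) & 7; /* ... and (b >> 1) & 7 = 2 = JCC_Z */
}
#endif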
1342
1343/* generate a conditional jump to label 'l1' according to jump opcode
1344 value 'b'. In the fast case, T0 is guaranteed not to be used. */
1345#ifndef VBOX
1346static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1347#else /* VBOX */
1348DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1349#endif /* VBOX */
1350{
1351 int inv, jcc_op, size, cond;
1352 TCGv t0;
1353
1354 inv = b & 1;
1355 jcc_op = (b >> 1) & 7;
1356
1357 switch(cc_op) {
1358 /* we optimize the cmp/jcc case */
1359 case CC_OP_SUBB:
1360 case CC_OP_SUBW:
1361 case CC_OP_SUBL:
1362 case CC_OP_SUBQ:
1363
1364 size = cc_op - CC_OP_SUBB;
1365 switch(jcc_op) {
1366 case JCC_Z:
1367 fast_jcc_z:
1368 switch(size) {
1369 case 0:
1370 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
1371 t0 = cpu_tmp0;
1372 break;
1373 case 1:
1374 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
1375 t0 = cpu_tmp0;
1376 break;
1377#ifdef TARGET_X86_64
1378 case 2:
1379 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1380 t0 = cpu_tmp0;
1381 break;
1382#endif
1383 default:
1384 t0 = cpu_cc_dst;
1385 break;
1386 }
1387 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
1388 break;
1389 case JCC_S:
1390 fast_jcc_s:
1391 switch(size) {
1392 case 0:
1393 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1394 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1395 0, l1);
1396 break;
1397 case 1:
1398 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1399 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1400 0, l1);
1401 break;
1402#ifdef TARGET_X86_64
1403 case 2:
1404 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1405 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1406 0, l1);
1407 break;
1408#endif
1409 default:
1410 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1411 0, l1);
1412 break;
1413 }
1414 break;
1415
1416 case JCC_B:
1417 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1418 goto fast_jcc_b;
1419 case JCC_BE:
1420 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1421 fast_jcc_b:
1422 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1423 switch(size) {
1424 case 0:
1425 t0 = cpu_tmp0;
1426 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1427 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1428 break;
1429 case 1:
1430 t0 = cpu_tmp0;
1431 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1432 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1433 break;
1434#ifdef TARGET_X86_64
1435 case 2:
1436 t0 = cpu_tmp0;
1437 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1438 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1439 break;
1440#endif
1441 default:
1442 t0 = cpu_cc_src;
1443 break;
1444 }
1445 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1446 break;
1447
1448 case JCC_L:
1449 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1450 goto fast_jcc_l;
1451 case JCC_LE:
1452 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1453 fast_jcc_l:
1454 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1455 switch(size) {
1456 case 0:
1457 t0 = cpu_tmp0;
1458 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1459 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1460 break;
1461 case 1:
1462 t0 = cpu_tmp0;
1463 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1464 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1465 break;
1466#ifdef TARGET_X86_64
1467 case 2:
1468 t0 = cpu_tmp0;
1469 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1470 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1471 break;
1472#endif
1473 default:
1474 t0 = cpu_cc_src;
1475 break;
1476 }
1477 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1478 break;
1479
1480 default:
1481 goto slow_jcc;
1482 }
1483 break;
1484
1485 /* some jumps are easy to compute */
1486 case CC_OP_ADDB:
1487 case CC_OP_ADDW:
1488 case CC_OP_ADDL:
1489 case CC_OP_ADDQ:
1490
1491 case CC_OP_ADCB:
1492 case CC_OP_ADCW:
1493 case CC_OP_ADCL:
1494 case CC_OP_ADCQ:
1495
1496 case CC_OP_SBBB:
1497 case CC_OP_SBBW:
1498 case CC_OP_SBBL:
1499 case CC_OP_SBBQ:
1500
1501 case CC_OP_LOGICB:
1502 case CC_OP_LOGICW:
1503 case CC_OP_LOGICL:
1504 case CC_OP_LOGICQ:
1505
1506 case CC_OP_INCB:
1507 case CC_OP_INCW:
1508 case CC_OP_INCL:
1509 case CC_OP_INCQ:
1510
1511 case CC_OP_DECB:
1512 case CC_OP_DECW:
1513 case CC_OP_DECL:
1514 case CC_OP_DECQ:
1515
1516 case CC_OP_SHLB:
1517 case CC_OP_SHLW:
1518 case CC_OP_SHLL:
1519 case CC_OP_SHLQ:
1520
1521 case CC_OP_SARB:
1522 case CC_OP_SARW:
1523 case CC_OP_SARL:
1524 case CC_OP_SARQ:
1525 switch(jcc_op) {
1526 case JCC_Z:
1527 size = (cc_op - CC_OP_ADDB) & 3;
1528 goto fast_jcc_z;
1529 case JCC_S:
1530 size = (cc_op - CC_OP_ADDB) & 3;
1531 goto fast_jcc_s;
1532 default:
1533 goto slow_jcc;
1534 }
1535 break;
1536 default:
1537 slow_jcc:
1538 gen_setcc_slow_T0(s, jcc_op);
1539 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1540 cpu_T[0], 0, l1);
1541 break;
1542 }
1543}
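/* Illustrative sketch, not part of the build: the fast JCC_B/JCC_L paths
 * above rely on the lazy-flags convention for subtraction, cc_dst = src1 -
 * src2 and cc_src = src2, so src1 is recovered as cc_dst + cc_src (the
 * cpu_tmp4 add) and the original CMP becomes a direct signed/unsigned TCG
 * compare.  cmp_below() is a hypothetical name. */
#if 0
static int cmp_below(unsigned long cc_dst, unsigned long cc_src)
{
    unsigned long src1 = cc_dst + cc_src; /* reconstruct the first operand */
    return src1 < cc_src;                 /* JB: unsigned src1 < src2 */
}
#endif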
1544
1545/* XXX: does not work with gdbstub "ice" single step - not a
1546 serious problem */
1547static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1548{
1549 int l1, l2;
1550
1551 l1 = gen_new_label();
1552 l2 = gen_new_label();
1553 gen_op_jnz_ecx(s->aflag, l1);
1554 gen_set_label(l2);
1555 gen_jmp_tb(s, next_eip, 1);
1556 gen_set_label(l1);
1557 return l2;
1558}
1559
1560#ifndef VBOX
1561static inline void gen_stos(DisasContext *s, int ot)
1562#else /* VBOX */
1563DECLINLINE(void) gen_stos(DisasContext *s, int ot)
1564#endif /* VBOX */
1565{
1566 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1567 gen_string_movl_A0_EDI(s);
1568 gen_op_st_T0_A0(ot + s->mem_index);
1569 gen_op_movl_T0_Dshift(ot);
1570 gen_op_add_reg_T0(s->aflag, R_EDI);
1571}
1572
1573#ifndef VBOX
1574static inline void gen_lods(DisasContext *s, int ot)
1575#else /* VBOX */
1576DECLINLINE(void) gen_lods(DisasContext *s, int ot)
1577#endif /* VBOX */
1578{
1579 gen_string_movl_A0_ESI(s);
1580 gen_op_ld_T0_A0(ot + s->mem_index);
1581 gen_op_mov_reg_T0(ot, R_EAX);
1582 gen_op_movl_T0_Dshift(ot);
1583 gen_op_add_reg_T0(s->aflag, R_ESI);
1584}
1585
1586#ifndef VBOX
1587static inline void gen_scas(DisasContext *s, int ot)
1588#else /* VBOX */
1589DECLINLINE(void) gen_scas(DisasContext *s, int ot)
1590#endif /* VBOX */
1591{
1592 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1593 gen_string_movl_A0_EDI(s);
1594 gen_op_ld_T1_A0(ot + s->mem_index);
1595 gen_op_cmpl_T0_T1_cc();
1596 gen_op_movl_T0_Dshift(ot);
1597 gen_op_add_reg_T0(s->aflag, R_EDI);
1598}
1599
1600#ifndef VBOX
1601static inline void gen_cmps(DisasContext *s, int ot)
1602#else /* VBOX */
1603DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
1604#endif /* VBOX */
1605{
1606 gen_string_movl_A0_ESI(s);
1607 gen_op_ld_T0_A0(ot + s->mem_index);
1608 gen_string_movl_A0_EDI(s);
1609 gen_op_ld_T1_A0(ot + s->mem_index);
1610 gen_op_cmpl_T0_T1_cc();
1611 gen_op_movl_T0_Dshift(ot);
1612 gen_op_add_reg_T0(s->aflag, R_ESI);
1613 gen_op_add_reg_T0(s->aflag, R_EDI);
1614}
1615
1616#ifndef VBOX
1617static inline void gen_ins(DisasContext *s, int ot)
1618#else /* VBOX */
1619DECLINLINE(void) gen_ins(DisasContext *s, int ot)
1620#endif /* VBOX */
1621{
1622 if (use_icount)
1623 gen_io_start();
1624 gen_string_movl_A0_EDI(s);
1625 /* Note: we must do this dummy write first to be restartable in
1626 case of page fault. */
1627 gen_op_movl_T0_0();
1628 gen_op_st_T0_A0(ot + s->mem_index);
1629 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1630 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1631 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1632 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1633 gen_op_st_T0_A0(ot + s->mem_index);
1634 gen_op_movl_T0_Dshift(ot);
1635 gen_op_add_reg_T0(s->aflag, R_EDI);
1636 if (use_icount)
1637 gen_io_end();
1638}
1639
1640#ifndef VBOX
1641static inline void gen_outs(DisasContext *s, int ot)
1642#else /* VBOX */
1643DECLINLINE(void) gen_outs(DisasContext *s, int ot)
1644#endif /* VBOX */
1645{
1646 if (use_icount)
1647 gen_io_start();
1648 gen_string_movl_A0_ESI(s);
1649 gen_op_ld_T0_A0(ot + s->mem_index);
1650
1651 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1652 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1653 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1654 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1655 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1656
1657 gen_op_movl_T0_Dshift(ot);
1658 gen_op_add_reg_T0(s->aflag, R_ESI);
1659 if (use_icount)
1660 gen_io_end();
1661}
1662
1663/* same method as Valgrind: we generate jumps to the current or next
1664 instruction */
1665#ifndef VBOX
1666#define GEN_REPZ(op) \
1667static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1668 target_ulong cur_eip, target_ulong next_eip) \
1669{ \
1670 int l2; \
1671 gen_update_cc_op(s); \
1672 l2 = gen_jz_ecx_string(s, next_eip); \
1673 gen_ ## op(s, ot); \
1674 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1675 /* a loop would cause two single step exceptions if ECX = 1 \
1676 before rep string_insn */ \
1677 if (!s->jmp_opt) \
1678 gen_op_jz_ecx(s->aflag, l2); \
1679 gen_jmp(s, cur_eip); \
1680}
1681#else /* VBOX */
1682#define GEN_REPZ(op) \
1683DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1684 target_ulong cur_eip, target_ulong next_eip) \
1685{ \
1686 int l2; \
1687 gen_update_cc_op(s); \
1688 l2 = gen_jz_ecx_string(s, next_eip); \
1689 gen_ ## op(s, ot); \
1690 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1691 /* a loop would cause two single step exceptions if ECX = 1 \
1692 before rep string_insn */ \
1693 if (!s->jmp_opt) \
1694 gen_op_jz_ecx(s->aflag, l2); \
1695 gen_jmp(s, cur_eip); \
1696}
1697#endif /* VBOX */
1698
1699#ifndef VBOX
1700#define GEN_REPZ2(op) \
1701static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1702 target_ulong cur_eip, \
1703 target_ulong next_eip, \
1704 int nz) \
1705{ \
1706 int l2; \
1707 gen_update_cc_op(s); \
1708 l2 = gen_jz_ecx_string(s, next_eip); \
1709 gen_ ## op(s, ot); \
1710 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1711 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1712 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1713 if (!s->jmp_opt) \
1714 gen_op_jz_ecx(s->aflag, l2); \
1715 gen_jmp(s, cur_eip); \
1716}
1717#else /* VBOX */
1718#define GEN_REPZ2(op) \
1719DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1720 target_ulong cur_eip, \
1721 target_ulong next_eip, \
1722 int nz) \
1723{ \
1724 int l2;\
1725 gen_update_cc_op(s); \
1726 l2 = gen_jz_ecx_string(s, next_eip); \
1727 gen_ ## op(s, ot); \
1728 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1729 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1730 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1731 if (!s->jmp_opt) \
1732 gen_op_jz_ecx(s->aflag, l2); \
1733 gen_jmp(s, cur_eip); \
1734}
1735#endif /* VBOX */
1736
1737GEN_REPZ(movs)
1738GEN_REPZ(stos)
1739GEN_REPZ(lods)
1740GEN_REPZ(ins)
1741GEN_REPZ(outs)
1742GEN_REPZ2(scas)
1743GEN_REPZ2(cmps)
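/* Illustrative sketch, not part of the build: each instantiation above
 * expands to a gen_repz_<op>() function; GEN_REPZ2 additionally re-tests ZF
 * after every iteration, which is what distinguishes REPE/REPNE SCAS/CMPS
 * from the plain REP string ops.  This is the expanded shape for 'movs',
 * under a hypothetical name. */
#if 0
static void gen_repz_movs_expanded(DisasContext *s, int ot,
                                   target_ulong cur_eip,
                                   target_ulong next_eip)
{
    int l2;
    gen_update_cc_op(s);
    l2 = gen_jz_ecx_string(s, next_eip);    /* exit when ECX == 0 */
    gen_movs(s, ot);                        /* move one element */
    gen_op_add_reg_im(s->aflag, R_ECX, -1); /* ECX-- */
    if (!s->jmp_opt)
        gen_op_jz_ecx(s->aflag, l2);
    gen_jmp(s, cur_eip);                    /* loop back to the insn */
}
#endif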
1744
1745static void *helper_fp_arith_ST0_FT0[8] = {
1746 helper_fadd_ST0_FT0,
1747 helper_fmul_ST0_FT0,
1748 helper_fcom_ST0_FT0,
1749 helper_fcom_ST0_FT0,
1750 helper_fsub_ST0_FT0,
1751 helper_fsubr_ST0_FT0,
1752 helper_fdiv_ST0_FT0,
1753 helper_fdivr_ST0_FT0,
1754};
1755
1756/* NOTE the exception in "r" op ordering */
1757static void *helper_fp_arith_STN_ST0[8] = {
1758 helper_fadd_STN_ST0,
1759 helper_fmul_STN_ST0,
1760 NULL,
1761 NULL,
1762 helper_fsubr_STN_ST0,
1763 helper_fsub_STN_ST0,
1764 helper_fdivr_STN_ST0,
1765 helper_fdiv_STN_ST0,
1766};
1767
1768/* if d == OR_TMP0, it means memory operand (address in A0) */
1769static void gen_op(DisasContext *s1, int op, int ot, int d)
1770{
1771 if (d != OR_TMP0) {
1772 gen_op_mov_TN_reg(ot, 0, d);
1773 } else {
1774 gen_op_ld_T0_A0(ot + s1->mem_index);
1775 }
1776 switch(op) {
1777 case OP_ADCL:
1778 if (s1->cc_op != CC_OP_DYNAMIC)
1779 gen_op_set_cc_op(s1->cc_op);
1780 gen_compute_eflags_c(cpu_tmp4);
1781 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1782 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1783 if (d != OR_TMP0)
1784 gen_op_mov_reg_T0(ot, d);
1785 else
1786 gen_op_st_T0_A0(ot + s1->mem_index);
1787 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1788 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1789 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1790 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1791 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1792 s1->cc_op = CC_OP_DYNAMIC;
1793 break;
1794 case OP_SBBL:
1795 if (s1->cc_op != CC_OP_DYNAMIC)
1796 gen_op_set_cc_op(s1->cc_op);
1797 gen_compute_eflags_c(cpu_tmp4);
1798 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1799 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1800 if (d != OR_TMP0)
1801 gen_op_mov_reg_T0(ot, d);
1802 else
1803 gen_op_st_T0_A0(ot + s1->mem_index);
1804 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1805 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1806 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1807 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1808 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1809 s1->cc_op = CC_OP_DYNAMIC;
1810 break;
1811 case OP_ADDL:
1812 gen_op_addl_T0_T1();
1813 if (d != OR_TMP0)
1814 gen_op_mov_reg_T0(ot, d);
1815 else
1816 gen_op_st_T0_A0(ot + s1->mem_index);
1817 gen_op_update2_cc();
1818 s1->cc_op = CC_OP_ADDB + ot;
1819 break;
1820 case OP_SUBL:
1821 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1822 if (d != OR_TMP0)
1823 gen_op_mov_reg_T0(ot, d);
1824 else
1825 gen_op_st_T0_A0(ot + s1->mem_index);
1826 gen_op_update2_cc();
1827 s1->cc_op = CC_OP_SUBB + ot;
1828 break;
1829 default:
1830 case OP_ANDL:
1831 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1832 if (d != OR_TMP0)
1833 gen_op_mov_reg_T0(ot, d);
1834 else
1835 gen_op_st_T0_A0(ot + s1->mem_index);
1836 gen_op_update1_cc();
1837 s1->cc_op = CC_OP_LOGICB + ot;
1838 break;
1839 case OP_ORL:
1840 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1841 if (d != OR_TMP0)
1842 gen_op_mov_reg_T0(ot, d);
1843 else
1844 gen_op_st_T0_A0(ot + s1->mem_index);
1845 gen_op_update1_cc();
1846 s1->cc_op = CC_OP_LOGICB + ot;
1847 break;
1848 case OP_XORL:
1849 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1850 if (d != OR_TMP0)
1851 gen_op_mov_reg_T0(ot, d);
1852 else
1853 gen_op_st_T0_A0(ot + s1->mem_index);
1854 gen_op_update1_cc();
1855 s1->cc_op = CC_OP_LOGICB + ot;
1856 break;
1857 case OP_CMPL:
1858 gen_op_cmpl_T0_T1_cc();
1859 s1->cc_op = CC_OP_SUBB + ot;
1860 break;
1861 }
1862}
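/* Illustrative sketch, not part of the build: gen_op() sets flags lazily,
 * recording the operands in cc_src/cc_dst and the operation+size in cc_op.
 * ADC/SBB fold the incoming carry into the operation, so their cc_op is
 * computed at run time: the shli-by-2 above steps from the ADD/SUB group to
 * the ADC/SBB group, which sits four entries later in the CC_OP enum.
 * dynamic_adc_cc_op() is a hypothetical name. */
#if 0
static int dynamic_adc_cc_op(int ot, int carry_in)
{
    return CC_OP_ADDB + ot + (carry_in << 2); /* carry 0 -> ADD*, 1 -> ADC* */
}
#endif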
1863
1864/* if d == OR_TMP0, it means memory operand (address in A0) */
1865static void gen_inc(DisasContext *s1, int ot, int d, int c)
1866{
1867 if (d != OR_TMP0)
1868 gen_op_mov_TN_reg(ot, 0, d);
1869 else
1870 gen_op_ld_T0_A0(ot + s1->mem_index);
1871 if (s1->cc_op != CC_OP_DYNAMIC)
1872 gen_op_set_cc_op(s1->cc_op);
1873 if (c > 0) {
1874 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1875 s1->cc_op = CC_OP_INCB + ot;
1876 } else {
1877 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1878 s1->cc_op = CC_OP_DECB + ot;
1879 }
1880 if (d != OR_TMP0)
1881 gen_op_mov_reg_T0(ot, d);
1882 else
1883 gen_op_st_T0_A0(ot + s1->mem_index);
1884 gen_compute_eflags_c(cpu_cc_src);
1885 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1886}
1887
1888static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1889 int is_right, int is_arith)
1890{
1891 target_ulong mask;
1892 int shift_label;
1893 TCGv t0, t1;
1894
1895 if (ot == OT_QUAD)
1896 mask = 0x3f;
1897 else
1898 mask = 0x1f;
1899
1900 /* load */
1901 if (op1 == OR_TMP0)
1902 gen_op_ld_T0_A0(ot + s->mem_index);
1903 else
1904 gen_op_mov_TN_reg(ot, 0, op1);
1905
1906 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1907
1908 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1909
1910 if (is_right) {
1911 if (is_arith) {
1912 gen_exts(ot, cpu_T[0]);
1913 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1914 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1915 } else {
1916 gen_extu(ot, cpu_T[0]);
1917 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1918 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1919 }
1920 } else {
1921 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1922 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1923 }
1924
1925 /* store */
1926 if (op1 == OR_TMP0)
1927 gen_op_st_T0_A0(ot + s->mem_index);
1928 else
1929 gen_op_mov_reg_T0(ot, op1);
1930
1931 /* update eflags if non zero shift */
1932 if (s->cc_op != CC_OP_DYNAMIC)
1933 gen_op_set_cc_op(s->cc_op);
1934
1935 /* XXX: inefficient */
1936 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1937 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1938
1939 tcg_gen_mov_tl(t0, cpu_T[0]);
1940 tcg_gen_mov_tl(t1, cpu_T3);
1941
1942 shift_label = gen_new_label();
1943 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1944
1945 tcg_gen_mov_tl(cpu_cc_src, t1);
1946 tcg_gen_mov_tl(cpu_cc_dst, t0);
1947 if (is_right)
1948 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1949 else
1950 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1951
1952 gen_set_label(shift_label);
1953 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1954
1955 tcg_temp_free(t0);
1956 tcg_temp_free(t1);
1957}
1958
1959static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1960 int is_right, int is_arith)
1961{
1962 int mask;
1963
1964 if (ot == OT_QUAD)
1965 mask = 0x3f;
1966 else
1967 mask = 0x1f;
1968
1969 /* load */
1970 if (op1 == OR_TMP0)
1971 gen_op_ld_T0_A0(ot + s->mem_index);
1972 else
1973 gen_op_mov_TN_reg(ot, 0, op1);
1974
1975 op2 &= mask;
1976 if (op2 != 0) {
1977 if (is_right) {
1978 if (is_arith) {
1979 gen_exts(ot, cpu_T[0]);
1980 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1981 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1982 } else {
1983 gen_extu(ot, cpu_T[0]);
1984 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1985 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1986 }
1987 } else {
1988 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1989 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1990 }
1991 }
1992
1993 /* store */
1994 if (op1 == OR_TMP0)
1995 gen_op_st_T0_A0(ot + s->mem_index);
1996 else
1997 gen_op_mov_reg_T0(ot, op1);
1998
1999 /* update eflags if non zero shift */
2000 if (op2 != 0) {
2001 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
2002 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
2003 if (is_right)
2004 s->cc_op = CC_OP_SARB + ot;
2005 else
2006 s->cc_op = CC_OP_SHLB + ot;
2007 }
2008}
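/* Illustrative sketch, not part of the build: both shift helpers stash the
 * value shifted by count-1 (cpu_T3 / cpu_tmp4) so the flags code can
 * recover CF as the last bit shifted out; with a zero count, x86 leaves the
 * flags untouched, hence the op2 != 0 and branch guards.  shr_carry() is a
 * hypothetical name. */
#if 0
static int shr_carry(unsigned int x, int count) /* count in 1..31 */
{
    return (x >> (count - 1)) & 1; /* e.g. x = 6 (0b110), count = 2 -> CF = 1 */
}
#endif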
2009
2010#ifndef VBOX
2011static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2012#else /* VBOX */
2013DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2014#endif /* VBOX */
2015{
2016 if (arg2 >= 0)
2017 tcg_gen_shli_tl(ret, arg1, arg2);
2018 else
2019 tcg_gen_shri_tl(ret, arg1, -arg2);
2020}
2021
2022/* XXX: add faster immediate case */
2023static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
2024 int is_right)
2025{
2026 target_ulong mask;
2027 int label1, label2, data_bits;
2028 TCGv t0, t1, t2, a0;
2029
2030 /* XXX: inefficient, but we must use local temps */
2031 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2032 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2033 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2034 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2035
2036 if (ot == OT_QUAD)
2037 mask = 0x3f;
2038 else
2039 mask = 0x1f;
2040
2041 /* load */
2042 if (op1 == OR_TMP0) {
2043 tcg_gen_mov_tl(a0, cpu_A0);
2044 gen_op_ld_v(ot + s->mem_index, t0, a0);
2045 } else {
2046 gen_op_mov_v_reg(ot, t0, op1);
2047 }
2048
2049 tcg_gen_mov_tl(t1, cpu_T[1]);
2050
2051 tcg_gen_andi_tl(t1, t1, mask);
2052
2053 /* Must test zero case to avoid using undefined behaviour in TCG
2054 shifts. */
2055 label1 = gen_new_label();
2056 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
2057
2058 if (ot <= OT_WORD)
2059 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
2060 else
2061 tcg_gen_mov_tl(cpu_tmp0, t1);
2062
2063 gen_extu(ot, t0);
2064 tcg_gen_mov_tl(t2, t0);
2065
2066 data_bits = 8 << ot;
2067 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
2068 fix TCG definition) */
2069 if (is_right) {
2070 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
2071 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2072 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
2073 } else {
2074 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
2075 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2076 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2077 }
2078 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2079
2080 gen_set_label(label1);
2081 /* store */
2082 if (op1 == OR_TMP0) {
2083 gen_op_st_v(ot + s->mem_index, t0, a0);
2084 } else {
2085 gen_op_mov_reg_v(ot, op1, t0);
2086 }
2087
2088 /* update eflags */
2089 if (s->cc_op != CC_OP_DYNAMIC)
2090 gen_op_set_cc_op(s->cc_op);
2091
2092 label2 = gen_new_label();
2093 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2094
2095 gen_compute_eflags(cpu_cc_src);
2096 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2097 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2098 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2099 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2100 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2101 if (is_right) {
2102 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2103 }
2104 tcg_gen_andi_tl(t0, t0, CC_C);
2105 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2106
2107 tcg_gen_discard_tl(cpu_cc_dst);
2108 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2109
2110 gen_set_label(label2);
2111 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2112
2113 tcg_temp_free(t0);
2114 tcg_temp_free(t1);
2115 tcg_temp_free(t2);
2116 tcg_temp_free(a0);
2117}
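/* Illustrative sketch, not part of the build: the rotate above is
 * synthesized from two shifts, ror(x, n) = (x >> n) | (x << (bits - n)),
 * and the n == 0 case is branched around because a TCG shift by the full
 * operand width is undefined.  ror8() is a hypothetical name. */
#if 0
#include <stdint.h>
static uint8_t ror8(uint8_t x, unsigned n) /* n in 1..7 */
{
    return (uint8_t)((x >> n) | (x << (8 - n))); /* ror8(0x81, 1) == 0xC0 */
}
#endif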
2118
2119static void *helper_rotc[8] = {
2120 helper_rclb,
2121 helper_rclw,
2122 helper_rcll,
2123 X86_64_ONLY(helper_rclq),
2124 helper_rcrb,
2125 helper_rcrw,
2126 helper_rcrl,
2127 X86_64_ONLY(helper_rcrq),
2128};
2129
2130/* XXX: add faster immediate = 1 case */
2131static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2132 int is_right)
2133{
2134 int label1;
2135
2136 if (s->cc_op != CC_OP_DYNAMIC)
2137 gen_op_set_cc_op(s->cc_op);
2138
2139 /* load */
2140 if (op1 == OR_TMP0)
2141 gen_op_ld_T0_A0(ot + s->mem_index);
2142 else
2143 gen_op_mov_TN_reg(ot, 0, op1);
2144
2145 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2146 cpu_T[0], cpu_T[0], cpu_T[1]);
2147 /* store */
2148 if (op1 == OR_TMP0)
2149 gen_op_st_T0_A0(ot + s->mem_index);
2150 else
2151 gen_op_mov_reg_T0(ot, op1);
2152
2153 /* update eflags */
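    /* The rcl/rcr helpers hand the resulting flags back in cpu_cc_tmp and
       use -1 as a sentinel for "count was zero, flags unchanged". */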
2154 label1 = gen_new_label();
2155 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2156
2157 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2158 tcg_gen_discard_tl(cpu_cc_dst);
2159 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2160
2161 gen_set_label(label1);
2162 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2163}
2164
2165/* XXX: add faster immediate case */
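/* SHLD/SHRD: shift T0 while feeding in bits taken from T1; the shift
   count arrives in cpu_T3. */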
2166static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2167 int is_right)
2168{
2169 int label1, label2, data_bits;
2170 target_ulong mask;
2171 TCGv t0, t1, t2, a0;
2172
2173 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2174 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2175 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2176 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2177
2178 if (ot == OT_QUAD)
2179 mask = 0x3f;
2180 else
2181 mask = 0x1f;
2182
2183 /* load */
2184 if (op1 == OR_TMP0) {
2185 tcg_gen_mov_tl(a0, cpu_A0);
2186 gen_op_ld_v(ot + s->mem_index, t0, a0);
2187 } else {
2188 gen_op_mov_v_reg(ot, t0, op1);
2189 }
2190
2191 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2192
2193 tcg_gen_mov_tl(t1, cpu_T[1]);
2194 tcg_gen_mov_tl(t2, cpu_T3);
2195
2196 /* Must test zero case to avoid using undefined behaviour in TCG
2197 shifts. */
2198 label1 = gen_new_label();
2199 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2200
2201 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2202 if (ot == OT_WORD) {
2203 /* Note: we implement the Intel behaviour for shift count > 16 */
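        /* A 32-bit composite of t0 and t1 is built so that counts 17..31
           keep pulling in bits from the second operand instead of zeros. */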
2204 if (is_right) {
2205 tcg_gen_andi_tl(t0, t0, 0xffff);
2206 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2207 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2208 tcg_gen_ext32u_tl(t0, t0);
2209
2210 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2211
2212            /* only needed if count > 16, but a test here would complicate the generated code */
2213 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2214 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2215
2216 tcg_gen_shr_tl(t0, t0, t2);
2217
2218 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2219 } else {
2220 /* XXX: not optimal */
2221 tcg_gen_andi_tl(t0, t0, 0xffff);
2222 tcg_gen_shli_tl(t1, t1, 16);
2223 tcg_gen_or_tl(t1, t1, t0);
2224 tcg_gen_ext32u_tl(t1, t1);
2225
2226 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2227 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2228 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2229 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2230
2231 tcg_gen_shl_tl(t0, t0, t2);
2232 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2233 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2234 tcg_gen_or_tl(t0, t0, t1);
2235 }
2236 } else {
2237 data_bits = 8 << ot;
2238 if (is_right) {
2239 if (ot == OT_LONG)
2240 tcg_gen_ext32u_tl(t0, t0);
2241
2242 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2243
2244 tcg_gen_shr_tl(t0, t0, t2);
2245 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2246 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2247 tcg_gen_or_tl(t0, t0, t1);
2248
2249 } else {
2250 if (ot == OT_LONG)
2251 tcg_gen_ext32u_tl(t1, t1);
2252
2253 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2254
2255 tcg_gen_shl_tl(t0, t0, t2);
2256 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2257 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2258 tcg_gen_or_tl(t0, t0, t1);
2259 }
2260 }
2261 tcg_gen_mov_tl(t1, cpu_tmp4);
2262
2263 gen_set_label(label1);
2264 /* store */
2265 if (op1 == OR_TMP0) {
2266 gen_op_st_v(ot + s->mem_index, t0, a0);
2267 } else {
2268 gen_op_mov_reg_v(ot, op1, t0);
2269 }
2270
2271 /* update eflags */
2272 if (s->cc_op != CC_OP_DYNAMIC)
2273 gen_op_set_cc_op(s->cc_op);
2274
2275 label2 = gen_new_label();
2276 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2277
2278 tcg_gen_mov_tl(cpu_cc_src, t1);
2279 tcg_gen_mov_tl(cpu_cc_dst, t0);
2280 if (is_right) {
2281 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2282 } else {
2283 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2284 }
2285 gen_set_label(label2);
2286 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2287
2288 tcg_temp_free(t0);
2289 tcg_temp_free(t1);
2290 tcg_temp_free(t2);
2291 tcg_temp_free(a0);
2292}
2293
2294static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2295{
2296 if (s != OR_TMP1)
2297 gen_op_mov_TN_reg(ot, 1, s);
2298 switch(op) {
2299 case OP_ROL:
2300 gen_rot_rm_T1(s1, ot, d, 0);
2301 break;
2302 case OP_ROR:
2303 gen_rot_rm_T1(s1, ot, d, 1);
2304 break;
2305 case OP_SHL:
2306 case OP_SHL1:
2307 gen_shift_rm_T1(s1, ot, d, 0, 0);
2308 break;
2309 case OP_SHR:
2310 gen_shift_rm_T1(s1, ot, d, 1, 0);
2311 break;
2312 case OP_SAR:
2313 gen_shift_rm_T1(s1, ot, d, 1, 1);
2314 break;
2315 case OP_RCL:
2316 gen_rotc_rm_T1(s1, ot, d, 0);
2317 break;
2318 case OP_RCR:
2319 gen_rotc_rm_T1(s1, ot, d, 1);
2320 break;
2321 }
2322}
2323
2324static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2325{
2326 switch(op) {
2327 case OP_SHL:
2328 case OP_SHL1:
2329 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2330 break;
2331 case OP_SHR:
2332 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2333 break;
2334 case OP_SAR:
2335 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2336 break;
2337 default:
2338 /* currently not optimized */
2339 gen_op_movl_T1_im(c);
2340 gen_shift(s1, op, ot, d, OR_TMP1);
2341 break;
2342 }
2343}
2344
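/* Decode a modrm memory operand (plus optional SIB byte and displacement)
   into cpu_A0. Worked example, 32-bit: 8b 44 9e 08 = mov eax,[esi+ebx*4+8]:
   modrm 0x44 gives mod=1, rm=4 (SIB follows); sib 0x9e gives scale=2,
   index=EBX, base=ESI; then one byte of displacement (8). */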
2345static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2346{
2347 target_long disp;
2348 int havesib;
2349 int base;
2350 int index;
2351 int scale;
2352 int opreg;
2353 int mod, rm, code, override, must_add_seg;
2354
2355 override = s->override;
2356 must_add_seg = s->addseg;
2357 if (override >= 0)
2358 must_add_seg = 1;
2359 mod = (modrm >> 6) & 3;
2360 rm = modrm & 7;
2361
2362 if (s->aflag) {
2363
2364 havesib = 0;
2365 base = rm;
2366 index = 0;
2367 scale = 0;
2368
2369 if (base == 4) {
2370 havesib = 1;
2371 code = ldub_code(s->pc++);
2372 scale = (code >> 6) & 3;
2373 index = ((code >> 3) & 7) | REX_X(s);
2374 base = (code & 7);
2375 }
2376 base |= REX_B(s);
2377
2378 switch (mod) {
2379 case 0:
2380 if ((base & 7) == 5) {
2381 base = -1;
2382 disp = (int32_t)ldl_code(s->pc);
2383 s->pc += 4;
2384 if (CODE64(s) && !havesib) {
2385 disp += s->pc + s->rip_offset;
2386 }
2387 } else {
2388 disp = 0;
2389 }
2390 break;
2391 case 1:
2392 disp = (int8_t)ldub_code(s->pc++);
2393 break;
2394 default:
2395 case 2:
2396#ifdef VBOX
2397 disp = (int32_t)ldl_code(s->pc);
2398#else
2399 disp = ldl_code(s->pc);
2400#endif
2401 s->pc += 4;
2402 break;
2403 }
2404
2405 if (base >= 0) {
2406 /* for correct popl handling with esp */
2407 if (base == 4 && s->popl_esp_hack)
2408 disp += s->popl_esp_hack;
2409#ifdef TARGET_X86_64
2410 if (s->aflag == 2) {
2411 gen_op_movq_A0_reg(base);
2412 if (disp != 0) {
2413 gen_op_addq_A0_im(disp);
2414 }
2415 } else
2416#endif
2417 {
2418 gen_op_movl_A0_reg(base);
2419 if (disp != 0)
2420 gen_op_addl_A0_im(disp);
2421 }
2422 } else {
2423#ifdef TARGET_X86_64
2424 if (s->aflag == 2) {
2425 gen_op_movq_A0_im(disp);
2426 } else
2427#endif
2428 {
2429 gen_op_movl_A0_im(disp);
2430 }
2431 }
2432 /* XXX: index == 4 is always invalid */
2433 if (havesib && (index != 4 || scale != 0)) {
2434#ifdef TARGET_X86_64
2435 if (s->aflag == 2) {
2436 gen_op_addq_A0_reg_sN(scale, index);
2437 } else
2438#endif
2439 {
2440 gen_op_addl_A0_reg_sN(scale, index);
2441 }
2442 }
2443 if (must_add_seg) {
2444 if (override < 0) {
2445 if (base == R_EBP || base == R_ESP)
2446 override = R_SS;
2447 else
2448 override = R_DS;
2449 }
2450#ifdef TARGET_X86_64
2451 if (s->aflag == 2) {
2452 gen_op_addq_A0_seg(override);
2453 } else
2454#endif
2455 {
2456 gen_op_addl_A0_seg(override);
2457 }
2458 }
2459 } else {
2460 switch (mod) {
2461 case 0:
2462 if (rm == 6) {
2463 disp = lduw_code(s->pc);
2464 s->pc += 2;
2465 gen_op_movl_A0_im(disp);
2466 rm = 0; /* avoid SS override */
2467 goto no_rm;
2468 } else {
2469 disp = 0;
2470 }
2471 break;
2472 case 1:
2473 disp = (int8_t)ldub_code(s->pc++);
2474 break;
2475 default:
2476 case 2:
2477 disp = lduw_code(s->pc);
2478 s->pc += 2;
2479 break;
2480 }
2481 switch(rm) {
2482 case 0:
2483 gen_op_movl_A0_reg(R_EBX);
2484 gen_op_addl_A0_reg_sN(0, R_ESI);
2485 break;
2486 case 1:
2487 gen_op_movl_A0_reg(R_EBX);
2488 gen_op_addl_A0_reg_sN(0, R_EDI);
2489 break;
2490 case 2:
2491 gen_op_movl_A0_reg(R_EBP);
2492 gen_op_addl_A0_reg_sN(0, R_ESI);
2493 break;
2494 case 3:
2495 gen_op_movl_A0_reg(R_EBP);
2496 gen_op_addl_A0_reg_sN(0, R_EDI);
2497 break;
2498 case 4:
2499 gen_op_movl_A0_reg(R_ESI);
2500 break;
2501 case 5:
2502 gen_op_movl_A0_reg(R_EDI);
2503 break;
2504 case 6:
2505 gen_op_movl_A0_reg(R_EBP);
2506 break;
2507 default:
2508 case 7:
2509 gen_op_movl_A0_reg(R_EBX);
2510 break;
2511 }
2512 if (disp != 0)
2513 gen_op_addl_A0_im(disp);
2514 gen_op_andl_A0_ffff();
2515 no_rm:
2516 if (must_add_seg) {
2517 if (override < 0) {
2518 if (rm == 2 || rm == 3 || rm == 6)
2519 override = R_SS;
2520 else
2521 override = R_DS;
2522 }
2523 gen_op_addl_A0_seg(override);
2524 }
2525 }
2526
2527 opreg = OR_A0;
2528 disp = 0;
2529 *reg_ptr = opreg;
2530 *offset_ptr = disp;
2531}
2532
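/* Consume the memory operand of a modrm byte without generating any code
   (used for hint instructions such as multi-byte NOPs). */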
2533static void gen_nop_modrm(DisasContext *s, int modrm)
2534{
2535 int mod, rm, base, code;
2536
2537 mod = (modrm >> 6) & 3;
2538 if (mod == 3)
2539 return;
2540 rm = modrm & 7;
2541
2542 if (s->aflag) {
2543
2544 base = rm;
2545
2546 if (base == 4) {
2547 code = ldub_code(s->pc++);
2548 base = (code & 7);
2549 }
2550
2551 switch (mod) {
2552 case 0:
2553 if (base == 5) {
2554 s->pc += 4;
2555 }
2556 break;
2557 case 1:
2558 s->pc++;
2559 break;
2560 default:
2561 case 2:
2562 s->pc += 4;
2563 break;
2564 }
2565 } else {
2566 switch (mod) {
2567 case 0:
2568 if (rm == 6) {
2569 s->pc += 2;
2570 }
2571 break;
2572 case 1:
2573 s->pc++;
2574 break;
2575 default:
2576 case 2:
2577 s->pc += 2;
2578 break;
2579 }
2580 }
2581}
2582
2583/* used for LEA and MOV AX, mem */
2584static void gen_add_A0_ds_seg(DisasContext *s)
2585{
2586 int override, must_add_seg;
2587 must_add_seg = s->addseg;
2588 override = R_DS;
2589 if (s->override >= 0) {
2590 override = s->override;
2591 must_add_seg = 1;
2592 } else {
2593 override = R_DS;
2594 }
2595 if (must_add_seg) {
2596#ifdef TARGET_X86_64
2597 if (CODE64(s)) {
2598 gen_op_addq_A0_seg(override);
2599 } else
2600#endif
2601 {
2602 gen_op_addl_A0_seg(override);
2603 }
2604 }
2605}
2606
2607/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2608 OR_TMP0 */
2609static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2610{
2611 int mod, rm, opreg, disp;
2612
2613 mod = (modrm >> 6) & 3;
2614 rm = (modrm & 7) | REX_B(s);
2615 if (mod == 3) {
2616 if (is_store) {
2617 if (reg != OR_TMP0)
2618 gen_op_mov_TN_reg(ot, 0, reg);
2619 gen_op_mov_reg_T0(ot, rm);
2620 } else {
2621 gen_op_mov_TN_reg(ot, 0, rm);
2622 if (reg != OR_TMP0)
2623 gen_op_mov_reg_T0(ot, reg);
2624 }
2625 } else {
2626 gen_lea_modrm(s, modrm, &opreg, &disp);
2627 if (is_store) {
2628 if (reg != OR_TMP0)
2629 gen_op_mov_TN_reg(ot, 0, reg);
2630 gen_op_st_T0_A0(ot + s->mem_index);
2631 } else {
2632 gen_op_ld_T0_A0(ot + s->mem_index);
2633 if (reg != OR_TMP0)
2634 gen_op_mov_reg_T0(ot, reg);
2635 }
2636 }
2637}
2638
2639#ifndef VBOX
2640static inline uint32_t insn_get(DisasContext *s, int ot)
2641#else /* VBOX */
2642DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2643#endif /* VBOX */
2644{
2645 uint32_t ret;
2646
2647 switch(ot) {
2648 case OT_BYTE:
2649 ret = ldub_code(s->pc);
2650 s->pc++;
2651 break;
2652 case OT_WORD:
2653 ret = lduw_code(s->pc);
2654 s->pc += 2;
2655 break;
2656 default:
2657 case OT_LONG:
2658 ret = ldl_code(s->pc);
2659 s->pc += 4;
2660 break;
2661 }
2662 return ret;
2663}
2664
2665#ifndef VBOX
2666static inline int insn_const_size(unsigned int ot)
2667#else /* VBOX */
2668DECLINLINE(int) insn_const_size(unsigned int ot)
2669#endif /* VBOX */
2670{
2671 if (ot <= OT_LONG)
2672 return 1 << ot;
2673 else
2674 return 4;
2675}
2676
2677#ifndef VBOX
2678static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2679#else /* VBOX */
2680DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2681#endif /* VBOX */
2682{
2683 TranslationBlock *tb;
2684 target_ulong pc;
2685
2686 pc = s->cs_base + eip;
2687 tb = s->tb;
2688 /* NOTE: we handle the case where the TB spans two pages here */
2689 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2690 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2691#ifdef VBOX
2692 gen_check_external_event(s);
2693#endif /* VBOX */
2694 /* jump to same page: we can use a direct jump */
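        /* tcg_gen_goto_tb emits a patchable direct jump; exiting with
           (long)tb + tb_num returns the TB pointer with the jump slot
           number in the low bits, letting the caller chain the TBs. */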
2695 tcg_gen_goto_tb(tb_num);
2696 gen_jmp_im(eip);
2697 tcg_gen_exit_tb((long)tb + tb_num);
2698 } else {
2699 /* jump to another page: currently not optimized */
2700 gen_jmp_im(eip);
2701 gen_eob(s);
2702 }
2703}
2704
2705#ifndef VBOX
2706static inline void gen_jcc(DisasContext *s, int b,
2707#else /* VBOX */
2708DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2709#endif /* VBOX */
2710 target_ulong val, target_ulong next_eip)
2711{
2712 int l1, l2, cc_op;
2713
2714 cc_op = s->cc_op;
2715 if (s->cc_op != CC_OP_DYNAMIC) {
2716 gen_op_set_cc_op(s->cc_op);
2717 s->cc_op = CC_OP_DYNAMIC;
2718 }
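    /* With jmp_opt both successor EIPs become chainable goto_tb exits;
       otherwise fall back to explicit EIP updates and a full end of block. */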
2719 if (s->jmp_opt) {
2720 l1 = gen_new_label();
2721 gen_jcc1(s, cc_op, b, l1);
2722
2723 gen_goto_tb(s, 0, next_eip);
2724
2725 gen_set_label(l1);
2726 gen_goto_tb(s, 1, val);
2727 s->is_jmp = 3;
2728 } else {
2729
2730 l1 = gen_new_label();
2731 l2 = gen_new_label();
2732 gen_jcc1(s, cc_op, b, l1);
2733
2734 gen_jmp_im(next_eip);
2735 tcg_gen_br(l2);
2736
2737 gen_set_label(l1);
2738 gen_jmp_im(val);
2739 gen_set_label(l2);
2740 gen_eob(s);
2741 }
2742}
2743
2744static void gen_setcc(DisasContext *s, int b)
2745{
2746 int inv, jcc_op, l1;
2747 TCGv t0;
2748
2749 if (is_fast_jcc_case(s, b)) {
2750 /* nominal case: we use a jump */
2751 /* XXX: make it faster by adding new instructions in TCG */
2752 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2753 tcg_gen_movi_tl(t0, 0);
2754 l1 = gen_new_label();
2755 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2756 tcg_gen_movi_tl(t0, 1);
2757 gen_set_label(l1);
2758 tcg_gen_mov_tl(cpu_T[0], t0);
2759 tcg_temp_free(t0);
2760 } else {
2761        /* slow case: it is more efficient not to generate a jump,
2762           although it is questionable whether this optimization is
2763           worthwhile */
2764 inv = b & 1;
2765 jcc_op = (b >> 1) & 7;
2766 gen_setcc_slow_T0(s, jcc_op);
2767 if (inv) {
2768 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2769 }
2770 }
2771}
2772
2773#ifndef VBOX
2774static inline void gen_op_movl_T0_seg(int seg_reg)
2775#else /* VBOX */
2776DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2777#endif /* VBOX */
2778{
2779 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2780 offsetof(CPUX86State,segs[seg_reg].selector));
2781}
2782
2783#ifndef VBOX
2784static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2785#else /* VBOX */
2786DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2787#endif /* VBOX */
2788{
2789 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2790 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2791 offsetof(CPUX86State,segs[seg_reg].selector));
2792 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2793 tcg_gen_st_tl(cpu_T[0], cpu_env,
2794 offsetof(CPUX86State,segs[seg_reg].base));
2795#ifdef VBOX
2796 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2797 if (seg_reg == R_CS)
2798 flags |= DESC_CS_MASK;
2799 gen_op_movl_T0_im(flags);
2800 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2801
2802 /* Set the limit to 0xffff. */
2803 gen_op_movl_T0_im(0xffff);
2804 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2805#endif
2806}
2807
2808/* move T0 to seg_reg and determine whether the CPU state may change.
2809   Never call this function with seg_reg == R_CS */
2810static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2811{
2812 if (s->pe && !s->vm86) {
2813 /* XXX: optimize by finding processor state dynamically */
2814 if (s->cc_op != CC_OP_DYNAMIC)
2815 gen_op_set_cc_op(s->cc_op);
2816 gen_jmp_im(cur_eip);
2817 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2818 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2819 /* abort translation because the addseg value may change or
2820           because ss32 may change. For R_SS, translation must always
2821           stop, as special handling is needed to disable hardware
2822           interrupts for the next instruction */
2823 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2824 s->is_jmp = 3;
2825 } else {
2826 gen_op_movl_seg_T0_vm(seg_reg);
2827 if (seg_reg == R_SS)
2828 s->is_jmp = 3;
2829 }
2830}
2831
2832#ifndef VBOX
2833static inline int svm_is_rep(int prefixes)
2834#else /* VBOX */
2835DECLINLINE(int) svm_is_rep(int prefixes)
2836#endif /* VBOX */
2837{
2838 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2839}
2840
2841#ifndef VBOX
2842static inline void
2843#else /* VBOX */
2844DECLINLINE(void)
2845#endif /* VBOX */
2846gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2847 uint32_t type, uint64_t param)
2848{
2849 /* no SVM activated; fast case */
2850 if (likely(!(s->flags & HF_SVMI_MASK)))
2851 return;
2852 if (s->cc_op != CC_OP_DYNAMIC)
2853 gen_op_set_cc_op(s->cc_op);
2854 gen_jmp_im(pc_start - s->cs_base);
2855 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2856 tcg_const_i32(type), tcg_const_i64(param));
2857}
2858
2859#ifndef VBOX
2860static inline void
2861#else /* VBOX */
2862DECLINLINE(void)
2863#endif
2864gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2865{
2866 gen_svm_check_intercept_param(s, pc_start, type, 0);
2867}
2868
2869#ifndef VBOX
2870static inline void gen_stack_update(DisasContext *s, int addend)
2871#else /* VBOX */
2872DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2873#endif /* VBOX */
2874{
2875#ifdef TARGET_X86_64
2876 if (CODE64(s)) {
2877 gen_op_add_reg_im(2, R_ESP, addend);
2878 } else
2879#endif
2880 if (s->ss32) {
2881 gen_op_add_reg_im(1, R_ESP, addend);
2882 } else {
2883 gen_op_add_reg_im(0, R_ESP, addend);
2884 }
2885}
2886
2887/* generate a push. It depends on ss32, addseg and dflag */
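/* e.g. a 32-bit push on a 32-bit stack stores 4 bytes at [ESP-4] and only
   then writes the decremented address back to ESP. */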
2888static void gen_push_T0(DisasContext *s)
2889{
2890#ifdef TARGET_X86_64
2891 if (CODE64(s)) {
2892 gen_op_movq_A0_reg(R_ESP);
2893 if (s->dflag) {
2894 gen_op_addq_A0_im(-8);
2895 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2896 } else {
2897 gen_op_addq_A0_im(-2);
2898 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2899 }
2900 gen_op_mov_reg_A0(2, R_ESP);
2901 } else
2902#endif
2903 {
2904 gen_op_movl_A0_reg(R_ESP);
2905 if (!s->dflag)
2906 gen_op_addl_A0_im(-2);
2907 else
2908 gen_op_addl_A0_im(-4);
2909 if (s->ss32) {
2910 if (s->addseg) {
2911 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2912 gen_op_addl_A0_seg(R_SS);
2913 }
2914 } else {
2915 gen_op_andl_A0_ffff();
2916 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2917 gen_op_addl_A0_seg(R_SS);
2918 }
2919 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2920 if (s->ss32 && !s->addseg)
2921 gen_op_mov_reg_A0(1, R_ESP);
2922 else
2923 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2924 }
2925}
2926
2927/* generate a push. It depends on ss32, addseg and dflag */
2928/* slower version for T1, only used for call Ev */
2929static void gen_push_T1(DisasContext *s)
2930{
2931#ifdef TARGET_X86_64
2932 if (CODE64(s)) {
2933 gen_op_movq_A0_reg(R_ESP);
2934 if (s->dflag) {
2935 gen_op_addq_A0_im(-8);
2936 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2937 } else {
2938 gen_op_addq_A0_im(-2);
2939            gen_op_st_T1_A0(OT_WORD + s->mem_index);
2940 }
2941 gen_op_mov_reg_A0(2, R_ESP);
2942 } else
2943#endif
2944 {
2945 gen_op_movl_A0_reg(R_ESP);
2946 if (!s->dflag)
2947 gen_op_addl_A0_im(-2);
2948 else
2949 gen_op_addl_A0_im(-4);
2950 if (s->ss32) {
2951 if (s->addseg) {
2952 gen_op_addl_A0_seg(R_SS);
2953 }
2954 } else {
2955 gen_op_andl_A0_ffff();
2956 gen_op_addl_A0_seg(R_SS);
2957 }
2958 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2959
2960 if (s->ss32 && !s->addseg)
2961 gen_op_mov_reg_A0(1, R_ESP);
2962 else
2963 gen_stack_update(s, (-2) << s->dflag);
2964 }
2965}
2966
2967/* a two-step pop is necessary for precise exceptions: the load may fault, so ESP is only updated afterwards by gen_pop_update */
2968static void gen_pop_T0(DisasContext *s)
2969{
2970#ifdef TARGET_X86_64
2971 if (CODE64(s)) {
2972 gen_op_movq_A0_reg(R_ESP);
2973 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2974 } else
2975#endif
2976 {
2977 gen_op_movl_A0_reg(R_ESP);
2978 if (s->ss32) {
2979 if (s->addseg)
2980 gen_op_addl_A0_seg(R_SS);
2981 } else {
2982 gen_op_andl_A0_ffff();
2983 gen_op_addl_A0_seg(R_SS);
2984 }
2985 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2986 }
2987}
2988
2989static void gen_pop_update(DisasContext *s)
2990{
2991#ifdef TARGET_X86_64
2992 if (CODE64(s) && s->dflag) {
2993 gen_stack_update(s, 8);
2994 } else
2995#endif
2996 {
2997 gen_stack_update(s, 2 << s->dflag);
2998 }
2999}
3000
3001static void gen_stack_A0(DisasContext *s)
3002{
3003 gen_op_movl_A0_reg(R_ESP);
3004 if (!s->ss32)
3005 gen_op_andl_A0_ffff();
3006 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3007 if (s->addseg)
3008 gen_op_addl_A0_seg(R_SS);
3009}
3010
3011/* NOTE: wrap-around in 16-bit mode is not fully handled */
3012static void gen_pusha(DisasContext *s)
3013{
3014 int i;
3015 gen_op_movl_A0_reg(R_ESP);
3016 gen_op_addl_A0_im(-16 << s->dflag);
3017 if (!s->ss32)
3018 gen_op_andl_A0_ffff();
3019 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3020 if (s->addseg)
3021 gen_op_addl_A0_seg(R_SS);
3022 for(i = 0;i < 8; i++) {
3023 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3024 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3025 gen_op_addl_A0_im(2 << s->dflag);
3026 }
3027 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3028}
3029
3030/* NOTE: wrap-around in 16-bit mode is not fully handled */
3031static void gen_popa(DisasContext *s)
3032{
3033 int i;
3034 gen_op_movl_A0_reg(R_ESP);
3035 if (!s->ss32)
3036 gen_op_andl_A0_ffff();
3037 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3038 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3039 if (s->addseg)
3040 gen_op_addl_A0_seg(R_SS);
3041 for(i = 0;i < 8; i++) {
3042 /* ESP is not reloaded */
3043 if (i != 3) {
3044 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3045 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3046 }
3047 gen_op_addl_A0_im(2 << s->dflag);
3048 }
3049 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3050}
3051
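/* ENTER: push EBP, let helper_enter_level copy `level` outer frame
   pointers, point EBP at the new frame and reserve esp_addend bytes of
   locals below it. */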
3052static void gen_enter(DisasContext *s, int esp_addend, int level)
3053{
3054 int ot, opsize;
3055
3056 level &= 0x1f;
3057#ifdef TARGET_X86_64
3058 if (CODE64(s)) {
3059 ot = s->dflag ? OT_QUAD : OT_WORD;
3060 opsize = 1 << ot;
3061
3062 gen_op_movl_A0_reg(R_ESP);
3063 gen_op_addq_A0_im(-opsize);
3064 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3065
3066 /* push bp */
3067 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3068 gen_op_st_T0_A0(ot + s->mem_index);
3069 if (level) {
3070 /* XXX: must save state */
3071 tcg_gen_helper_0_3(helper_enter64_level,
3072 tcg_const_i32(level),
3073 tcg_const_i32((ot == OT_QUAD)),
3074 cpu_T[1]);
3075 }
3076 gen_op_mov_reg_T1(ot, R_EBP);
3077 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3078 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3079 } else
3080#endif
3081 {
3082 ot = s->dflag + OT_WORD;
3083 opsize = 2 << s->dflag;
3084
3085 gen_op_movl_A0_reg(R_ESP);
3086 gen_op_addl_A0_im(-opsize);
3087 if (!s->ss32)
3088 gen_op_andl_A0_ffff();
3089 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3090 if (s->addseg)
3091 gen_op_addl_A0_seg(R_SS);
3092 /* push bp */
3093 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3094 gen_op_st_T0_A0(ot + s->mem_index);
3095 if (level) {
3096 /* XXX: must save state */
3097 tcg_gen_helper_0_3(helper_enter_level,
3098 tcg_const_i32(level),
3099 tcg_const_i32(s->dflag),
3100 cpu_T[1]);
3101 }
3102 gen_op_mov_reg_T1(ot, R_EBP);
3103 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3104 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3105 }
3106}
3107
3108static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3109{
3110 if (s->cc_op != CC_OP_DYNAMIC)
3111 gen_op_set_cc_op(s->cc_op);
3112 gen_jmp_im(cur_eip);
3113 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3114 s->is_jmp = 3;
3115}
3116
3117/* an interrupt is different from an exception because of the
3118 privilege checks */
3119static void gen_interrupt(DisasContext *s, int intno,
3120 target_ulong cur_eip, target_ulong next_eip)
3121{
3122 if (s->cc_op != CC_OP_DYNAMIC)
3123 gen_op_set_cc_op(s->cc_op);
3124 gen_jmp_im(cur_eip);
3125 tcg_gen_helper_0_2(helper_raise_interrupt,
3126 tcg_const_i32(intno),
3127 tcg_const_i32(next_eip - cur_eip));
3128 s->is_jmp = 3;
3129}
3130
3131static void gen_debug(DisasContext *s, target_ulong cur_eip)
3132{
3133 if (s->cc_op != CC_OP_DYNAMIC)
3134 gen_op_set_cc_op(s->cc_op);
3135 gen_jmp_im(cur_eip);
3136 tcg_gen_helper_0_0(helper_debug);
3137 s->is_jmp = 3;
3138}
3139
3140/* generate a generic end of block. A trace exception is also generated
3141   if needed */
3142static void gen_eob(DisasContext *s)
3143{
3144#ifdef VBOX
3145 gen_check_external_event(s);
3146#endif /* VBOX */
3147 if (s->cc_op != CC_OP_DYNAMIC)
3148 gen_op_set_cc_op(s->cc_op);
3149 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3150 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3151 }
3152 if (s->singlestep_enabled) {
3153 tcg_gen_helper_0_0(helper_debug);
3154 } else if (s->tf) {
3155 tcg_gen_helper_0_0(helper_single_step);
3156 } else {
3157 tcg_gen_exit_tb(0);
3158 }
3159 s->is_jmp = 3;
3160}
3161
3162/* generate a jump to eip. No segment change may happen before this, as
3163   a direct jump to the next block may occur */
3164static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3165{
3166 if (s->jmp_opt) {
3167 if (s->cc_op != CC_OP_DYNAMIC) {
3168 gen_op_set_cc_op(s->cc_op);
3169 s->cc_op = CC_OP_DYNAMIC;
3170 }
3171 gen_goto_tb(s, tb_num, eip);
3172 s->is_jmp = 3;
3173 } else {
3174 gen_jmp_im(eip);
3175 gen_eob(s);
3176 }
3177}
3178
3179static void gen_jmp(DisasContext *s, target_ulong eip)
3180{
3181 gen_jmp_tb(s, eip, 0);
3182}
3183
3184#ifndef VBOX
3185static inline void gen_ldq_env_A0(int idx, int offset)
3186#else /* VBOX */
3187DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3188#endif /* VBOX */
3189{
3190 int mem_index = (idx >> 2) - 1;
3191 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3192 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3193}
3194
3195#ifndef VBOX
3196static inline void gen_stq_env_A0(int idx, int offset)
3197#else /* VBOX */
3198DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3199#endif /* VBOX */
3200{
3201 int mem_index = (idx >> 2) - 1;
3202 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3203 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3204}
3205
3206#ifndef VBOX
3207static inline void gen_ldo_env_A0(int idx, int offset)
3208#else /* VBOX */
3209DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3210#endif /* VBOX */
3211{
3212 int mem_index = (idx >> 2) - 1;
3213 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3214 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3215 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3216 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3217 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3218}
3219
3220#ifndef VBOX
3221static inline void gen_sto_env_A0(int idx, int offset)
3222#else /* VBOX */
3223DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3224#endif /* VBOX */
3225{
3226 int mem_index = (idx >> 2) - 1;
3227 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3228 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3229 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3230 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3231 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3232}
3233
3234#ifndef VBOX
3235static inline void gen_op_movo(int d_offset, int s_offset)
3236#else /* VBOX */
3237DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3238#endif /* VBOX */
3239{
3240 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3241 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3242 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3243 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3244}
3245
3246#ifndef VBOX
3247static inline void gen_op_movq(int d_offset, int s_offset)
3248#else /* VBOX */
3249DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3250#endif /* VBOX */
3251{
3252 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3253 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3254}
3255
3256#ifndef VBOX
3257static inline void gen_op_movl(int d_offset, int s_offset)
3258#else /* VBOX */
3259DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3260#endif /* VBOX */
3261{
3262 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3263 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3264}
3265
3266#ifndef VBOX
3267static inline void gen_op_movq_env_0(int d_offset)
3268#else /* VBOX */
3269DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3270#endif /* VBOX */
3271{
3272 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3273 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3274}
3275
3276#define SSE_SPECIAL ((void *)1)
3277#define SSE_DUMMY ((void *)2)
3278
3279#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3280#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3281 helper_ ## x ## ss, helper_ ## x ## sd, }
3282
3283static void *sse_op_table1[256][4] = {
3284 /* 3DNow! extensions */
3285 [0x0e] = { SSE_DUMMY }, /* femms */
3286 [0x0f] = { SSE_DUMMY }, /* pf... */
3287 /* pure SSE operations */
3288 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3289 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3290 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3291 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3292 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3293 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3294 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3295 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3296
3297 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3298 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3299 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3300 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3301 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3302 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3303 [0x2e] = { helper_ucomiss, helper_ucomisd },
3304 [0x2f] = { helper_comiss, helper_comisd },
3305 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3306 [0x51] = SSE_FOP(sqrt),
3307 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3308 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3309 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3310 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3311 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3312 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3313 [0x58] = SSE_FOP(add),
3314 [0x59] = SSE_FOP(mul),
3315 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3316 helper_cvtss2sd, helper_cvtsd2ss },
3317 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3318 [0x5c] = SSE_FOP(sub),
3319 [0x5d] = SSE_FOP(min),
3320 [0x5e] = SSE_FOP(div),
3321 [0x5f] = SSE_FOP(max),
3322
3323 [0xc2] = SSE_FOP(cmpeq),
3324 [0xc6] = { helper_shufps, helper_shufpd },
3325
3326 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3327 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3328
3329 /* MMX ops and their SSE extensions */
3330 [0x60] = MMX_OP2(punpcklbw),
3331 [0x61] = MMX_OP2(punpcklwd),
3332 [0x62] = MMX_OP2(punpckldq),
3333 [0x63] = MMX_OP2(packsswb),
3334 [0x64] = MMX_OP2(pcmpgtb),
3335 [0x65] = MMX_OP2(pcmpgtw),
3336 [0x66] = MMX_OP2(pcmpgtl),
3337 [0x67] = MMX_OP2(packuswb),
3338 [0x68] = MMX_OP2(punpckhbw),
3339 [0x69] = MMX_OP2(punpckhwd),
3340 [0x6a] = MMX_OP2(punpckhdq),
3341 [0x6b] = MMX_OP2(packssdw),
3342 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3343 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3344 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3345    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3346 [0x70] = { helper_pshufw_mmx,
3347 helper_pshufd_xmm,
3348 helper_pshufhw_xmm,
3349 helper_pshuflw_xmm },
3350 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3351 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3352 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3353 [0x74] = MMX_OP2(pcmpeqb),
3354 [0x75] = MMX_OP2(pcmpeqw),
3355 [0x76] = MMX_OP2(pcmpeql),
3356 [0x77] = { SSE_DUMMY }, /* emms */
3357 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3358 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3359 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
3360 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3361 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3362 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3363 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3364 [0xd1] = MMX_OP2(psrlw),
3365 [0xd2] = MMX_OP2(psrld),
3366 [0xd3] = MMX_OP2(psrlq),
3367 [0xd4] = MMX_OP2(paddq),
3368 [0xd5] = MMX_OP2(pmullw),
3369 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3370 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3371 [0xd8] = MMX_OP2(psubusb),
3372 [0xd9] = MMX_OP2(psubusw),
3373 [0xda] = MMX_OP2(pminub),
3374 [0xdb] = MMX_OP2(pand),
3375 [0xdc] = MMX_OP2(paddusb),
3376 [0xdd] = MMX_OP2(paddusw),
3377 [0xde] = MMX_OP2(pmaxub),
3378 [0xdf] = MMX_OP2(pandn),
3379 [0xe0] = MMX_OP2(pavgb),
3380 [0xe1] = MMX_OP2(psraw),
3381 [0xe2] = MMX_OP2(psrad),
3382 [0xe3] = MMX_OP2(pavgw),
3383 [0xe4] = MMX_OP2(pmulhuw),
3384 [0xe5] = MMX_OP2(pmulhw),
3385 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3386    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3387 [0xe8] = MMX_OP2(psubsb),
3388 [0xe9] = MMX_OP2(psubsw),
3389 [0xea] = MMX_OP2(pminsw),
3390 [0xeb] = MMX_OP2(por),
3391 [0xec] = MMX_OP2(paddsb),
3392 [0xed] = MMX_OP2(paddsw),
3393 [0xee] = MMX_OP2(pmaxsw),
3394 [0xef] = MMX_OP2(pxor),
3395 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3396 [0xf1] = MMX_OP2(psllw),
3397 [0xf2] = MMX_OP2(pslld),
3398 [0xf3] = MMX_OP2(psllq),
3399 [0xf4] = MMX_OP2(pmuludq),
3400 [0xf5] = MMX_OP2(pmaddwd),
3401 [0xf6] = MMX_OP2(psadbw),
3402 [0xf7] = MMX_OP2(maskmov),
3403 [0xf8] = MMX_OP2(psubb),
3404 [0xf9] = MMX_OP2(psubw),
3405 [0xfa] = MMX_OP2(psubl),
3406 [0xfb] = MMX_OP2(psubq),
3407 [0xfc] = MMX_OP2(paddb),
3408 [0xfd] = MMX_OP2(paddw),
3409 [0xfe] = MMX_OP2(paddl),
3410};
3411
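/* Immediate-form MMX/SSE shifts (0f 71/72/73): row is ((b - 1) & 3) * 8
   plus the modrm /r field (0x71 = word, 0x72 = dword, 0x73 = qword);
   the column selects the MMX or XMM helper. */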
3412static void *sse_op_table2[3 * 8][2] = {
3413 [0 + 2] = MMX_OP2(psrlw),
3414 [0 + 4] = MMX_OP2(psraw),
3415 [0 + 6] = MMX_OP2(psllw),
3416 [8 + 2] = MMX_OP2(psrld),
3417 [8 + 4] = MMX_OP2(psrad),
3418 [8 + 6] = MMX_OP2(pslld),
3419 [16 + 2] = MMX_OP2(psrlq),
3420 [16 + 3] = { NULL, helper_psrldq_xmm },
3421 [16 + 6] = MMX_OP2(psllq),
3422 [16 + 7] = { NULL, helper_pslldq_xmm },
3423};
3424
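/* Scalar conversions in rows of four { ss/i32, sd/i32, ss/i64, sd/i64 }:
   row 0 = int->float (cvtsi2*), +4 = truncating float->int (cvtt*2si),
   +8 = rounding float->int (cvt*2si); see the index math in gen_sse. */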
3425static void *sse_op_table3[4 * 3] = {
3426 helper_cvtsi2ss,
3427 helper_cvtsi2sd,
3428 X86_64_ONLY(helper_cvtsq2ss),
3429 X86_64_ONLY(helper_cvtsq2sd),
3430
3431 helper_cvttss2si,
3432 helper_cvttsd2si,
3433 X86_64_ONLY(helper_cvttss2sq),
3434 X86_64_ONLY(helper_cvttsd2sq),
3435
3436 helper_cvtss2si,
3437 helper_cvtsd2si,
3438 X86_64_ONLY(helper_cvtss2sq),
3439 X86_64_ONLY(helper_cvtsd2sq),
3440};
3441
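/* CMPccPS/PD/SS/SD (0f c2): row indexed by the imm8 predicate
   (0 = eq ... 7 = ord), column by mandatory prefix as in sse_op_table1. */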
3442static void *sse_op_table4[8][4] = {
3443 SSE_FOP(cmpeq),
3444 SSE_FOP(cmplt),
3445 SSE_FOP(cmple),
3446 SSE_FOP(cmpunord),
3447 SSE_FOP(cmpneq),
3448 SSE_FOP(cmpnlt),
3449 SSE_FOP(cmpnle),
3450 SSE_FOP(cmpord),
3451};
3452
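/* 3DNow! (0f 0f): indexed by the opcode suffix byte that follows the
   modrm operand. */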
3453static void *sse_op_table5[256] = {
3454 [0x0c] = helper_pi2fw,
3455 [0x0d] = helper_pi2fd,
3456 [0x1c] = helper_pf2iw,
3457 [0x1d] = helper_pf2id,
3458 [0x8a] = helper_pfnacc,
3459 [0x8e] = helper_pfpnacc,
3460 [0x90] = helper_pfcmpge,
3461 [0x94] = helper_pfmin,
3462 [0x96] = helper_pfrcp,
3463 [0x97] = helper_pfrsqrt,
3464 [0x9a] = helper_pfsub,
3465 [0x9e] = helper_pfadd,
3466 [0xa0] = helper_pfcmpgt,
3467 [0xa4] = helper_pfmax,
3468 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3469 [0xa7] = helper_movq, /* pfrsqit1 */
3470 [0xaa] = helper_pfsubr,
3471 [0xae] = helper_pfacc,
3472 [0xb0] = helper_pfcmpeq,
3473 [0xb4] = helper_pfmul,
3474 [0xb6] = helper_movq, /* pfrcpit2 */
3475 [0xb7] = helper_pmulhrw_mmx,
3476 [0xbb] = helper_pswapd,
3477 [0xbf] = helper_pavgb_mmx /* pavgusb */
3478};
3479
3480struct sse_op_helper_s {
3481 void *op[2]; uint32_t ext_mask;
3482};
3483#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3484#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3485#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3486#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3487static struct sse_op_helper_s sse_op_table6[256] = {
3488 [0x00] = SSSE3_OP(pshufb),
3489 [0x01] = SSSE3_OP(phaddw),
3490 [0x02] = SSSE3_OP(phaddd),
3491 [0x03] = SSSE3_OP(phaddsw),
3492 [0x04] = SSSE3_OP(pmaddubsw),
3493 [0x05] = SSSE3_OP(phsubw),
3494 [0x06] = SSSE3_OP(phsubd),
3495 [0x07] = SSSE3_OP(phsubsw),
3496 [0x08] = SSSE3_OP(psignb),
3497 [0x09] = SSSE3_OP(psignw),
3498 [0x0a] = SSSE3_OP(psignd),
3499 [0x0b] = SSSE3_OP(pmulhrsw),
3500 [0x10] = SSE41_OP(pblendvb),
3501 [0x14] = SSE41_OP(blendvps),
3502 [0x15] = SSE41_OP(blendvpd),
3503 [0x17] = SSE41_OP(ptest),
3504 [0x1c] = SSSE3_OP(pabsb),
3505 [0x1d] = SSSE3_OP(pabsw),
3506 [0x1e] = SSSE3_OP(pabsd),
3507 [0x20] = SSE41_OP(pmovsxbw),
3508 [0x21] = SSE41_OP(pmovsxbd),
3509 [0x22] = SSE41_OP(pmovsxbq),
3510 [0x23] = SSE41_OP(pmovsxwd),
3511 [0x24] = SSE41_OP(pmovsxwq),
3512 [0x25] = SSE41_OP(pmovsxdq),
3513 [0x28] = SSE41_OP(pmuldq),
3514 [0x29] = SSE41_OP(pcmpeqq),
3515    [0x2a] = SSE41_SPECIAL, /* movntdqa */
3516 [0x2b] = SSE41_OP(packusdw),
3517 [0x30] = SSE41_OP(pmovzxbw),
3518 [0x31] = SSE41_OP(pmovzxbd),
3519 [0x32] = SSE41_OP(pmovzxbq),
3520 [0x33] = SSE41_OP(pmovzxwd),
3521 [0x34] = SSE41_OP(pmovzxwq),
3522 [0x35] = SSE41_OP(pmovzxdq),
3523 [0x37] = SSE42_OP(pcmpgtq),
3524 [0x38] = SSE41_OP(pminsb),
3525 [0x39] = SSE41_OP(pminsd),
3526 [0x3a] = SSE41_OP(pminuw),
3527 [0x3b] = SSE41_OP(pminud),
3528 [0x3c] = SSE41_OP(pmaxsb),
3529 [0x3d] = SSE41_OP(pmaxsd),
3530 [0x3e] = SSE41_OP(pmaxuw),
3531 [0x3f] = SSE41_OP(pmaxud),
3532 [0x40] = SSE41_OP(pmulld),
3533 [0x41] = SSE41_OP(phminposuw),
3534};
3535
3536static struct sse_op_helper_s sse_op_table7[256] = {
3537 [0x08] = SSE41_OP(roundps),
3538 [0x09] = SSE41_OP(roundpd),
3539 [0x0a] = SSE41_OP(roundss),
3540 [0x0b] = SSE41_OP(roundsd),
3541 [0x0c] = SSE41_OP(blendps),
3542 [0x0d] = SSE41_OP(blendpd),
3543 [0x0e] = SSE41_OP(pblendw),
3544 [0x0f] = SSSE3_OP(palignr),
3545 [0x14] = SSE41_SPECIAL, /* pextrb */
3546 [0x15] = SSE41_SPECIAL, /* pextrw */
3547 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3548 [0x17] = SSE41_SPECIAL, /* extractps */
3549 [0x20] = SSE41_SPECIAL, /* pinsrb */
3550 [0x21] = SSE41_SPECIAL, /* insertps */
3551 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3552 [0x40] = SSE41_OP(dpps),
3553 [0x41] = SSE41_OP(dppd),
3554 [0x42] = SSE41_OP(mpsadbw),
3555 [0x60] = SSE42_OP(pcmpestrm),
3556 [0x61] = SSE42_OP(pcmpestri),
3557 [0x62] = SSE42_OP(pcmpistrm),
3558 [0x63] = SSE42_OP(pcmpistri),
3559};
3560
3561static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3562{
3563 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3564 int modrm, mod, rm, reg, reg_addr, offset_addr;
3565 void *sse_op2;
3566
3567 b &= 0xff;
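    /* b1 selects the helper-table column by mandatory prefix:
       0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */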
3568 if (s->prefix & PREFIX_DATA)
3569 b1 = 1;
3570 else if (s->prefix & PREFIX_REPZ)
3571 b1 = 2;
3572 else if (s->prefix & PREFIX_REPNZ)
3573 b1 = 3;
3574 else
3575 b1 = 0;
3576 sse_op2 = sse_op_table1[b][b1];
3577 if (!sse_op2)
3578 goto illegal_op;
3579 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3580 is_xmm = 1;
3581 } else {
3582 if (b1 == 0) {
3583 /* MMX case */
3584 is_xmm = 0;
3585 } else {
3586 is_xmm = 1;
3587 }
3588 }
3589 /* simple MMX/SSE operation */
3590 if (s->flags & HF_TS_MASK) {
3591 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3592 return;
3593 }
3594 if (s->flags & HF_EM_MASK) {
3595 illegal_op:
3596 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3597 return;
3598 }
3599 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3600 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3601 goto illegal_op;
3602 if (b == 0x0e) {
3603 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3604 goto illegal_op;
3605 /* femms */
3606 tcg_gen_helper_0_0(helper_emms);
3607 return;
3608 }
3609 if (b == 0x77) {
3610 /* emms */
3611 tcg_gen_helper_0_0(helper_emms);
3612 return;
3613 }
3614 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3615 the static cpu state) */
3616 if (!is_xmm) {
3617 tcg_gen_helper_0_0(helper_enter_mmx);
3618 }
3619
3620 modrm = ldub_code(s->pc++);
3621 reg = ((modrm >> 3) & 7);
3622 if (is_xmm)
3623 reg |= rex_r;
3624 mod = (modrm >> 6) & 3;
3625 if (sse_op2 == SSE_SPECIAL) {
3626 b |= (b1 << 8);
3627 switch(b) {
3628 case 0x0e7: /* movntq */
3629 if (mod == 3)
3630 goto illegal_op;
3631 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3632 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3633 break;
3634 case 0x1e7: /* movntdq */
3635 case 0x02b: /* movntps */
3636        case 0x12b: /* movntpd */
3637 case 0x3f0: /* lddqu */
3638 if (mod == 3)
3639 goto illegal_op;
3640 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3641 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3642 break;
3643 case 0x6e: /* movd mm, ea */
3644#ifdef TARGET_X86_64
3645 if (s->dflag == 2) {
3646 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3647 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3648 } else
3649#endif
3650 {
3651 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3652 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3653 offsetof(CPUX86State,fpregs[reg].mmx));
3654 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3655 }
3656 break;
3657 case 0x16e: /* movd xmm, ea */
3658#ifdef TARGET_X86_64
3659 if (s->dflag == 2) {
3660 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3661 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3662 offsetof(CPUX86State,xmm_regs[reg]));
3663 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3664 } else
3665#endif
3666 {
3667 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3668 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3669 offsetof(CPUX86State,xmm_regs[reg]));
3670 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3671 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3672 }
3673 break;
3674 case 0x6f: /* movq mm, ea */
3675 if (mod != 3) {
3676 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3677 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3678 } else {
3679 rm = (modrm & 7);
3680 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3681 offsetof(CPUX86State,fpregs[rm].mmx));
3682 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3683 offsetof(CPUX86State,fpregs[reg].mmx));
3684 }
3685 break;
3686 case 0x010: /* movups */
3687 case 0x110: /* movupd */
3688 case 0x028: /* movaps */
3689 case 0x128: /* movapd */
3690 case 0x16f: /* movdqa xmm, ea */
3691 case 0x26f: /* movdqu xmm, ea */
3692 if (mod != 3) {
3693 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3694 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3695 } else {
3696 rm = (modrm & 7) | REX_B(s);
3697 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3698 offsetof(CPUX86State,xmm_regs[rm]));
3699 }
3700 break;
3701 case 0x210: /* movss xmm, ea */
3702 if (mod != 3) {
3703 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3704 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3705 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3706 gen_op_movl_T0_0();
3707 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3708 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3709 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3710 } else {
3711 rm = (modrm & 7) | REX_B(s);
3712 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3713 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3714 }
3715 break;
3716 case 0x310: /* movsd xmm, ea */
3717 if (mod != 3) {
3718 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3719 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3720 gen_op_movl_T0_0();
3721 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3722 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3723 } else {
3724 rm = (modrm & 7) | REX_B(s);
3725 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3726 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3727 }
3728 break;
3729 case 0x012: /* movlps */
3730 case 0x112: /* movlpd */
3731 if (mod != 3) {
3732 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3733 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3734 } else {
3735 /* movhlps */
3736 rm = (modrm & 7) | REX_B(s);
3737 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3738 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3739 }
3740 break;
3741 case 0x212: /* movsldup */
3742 if (mod != 3) {
3743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3744 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3745 } else {
3746 rm = (modrm & 7) | REX_B(s);
3747 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3748 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3749 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3750 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3751 }
3752 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3753 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3754 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3755 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3756 break;
3757 case 0x312: /* movddup */
3758 if (mod != 3) {
3759 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3760 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3761 } else {
3762 rm = (modrm & 7) | REX_B(s);
3763 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3764 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3765 }
3766 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3767 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3768 break;
3769 case 0x016: /* movhps */
3770 case 0x116: /* movhpd */
3771 if (mod != 3) {
3772 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3773 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3774 } else {
3775 /* movlhps */
3776 rm = (modrm & 7) | REX_B(s);
3777 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3778 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3779 }
3780 break;
3781 case 0x216: /* movshdup */
3782 if (mod != 3) {
3783 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3784 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3785 } else {
3786 rm = (modrm & 7) | REX_B(s);
3787 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3788 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3789 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3790 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3791 }
3792 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3793 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3794 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3795 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3796 break;
3797 case 0x7e: /* movd ea, mm */
3798#ifdef TARGET_X86_64
3799 if (s->dflag == 2) {
3800 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3801 offsetof(CPUX86State,fpregs[reg].mmx));
3802 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3803 } else
3804#endif
3805 {
3806 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3807 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3808 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3809 }
3810 break;
3811 case 0x17e: /* movd ea, xmm */
3812#ifdef TARGET_X86_64
3813 if (s->dflag == 2) {
3814 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3815 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3816 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3817 } else
3818#endif
3819 {
3820 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3821 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3822 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3823 }
3824 break;
3825 case 0x27e: /* movq xmm, ea */
3826 if (mod != 3) {
3827 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3828 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3829 } else {
3830 rm = (modrm & 7) | REX_B(s);
3831 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3832 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3833 }
3834 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3835 break;
3836 case 0x7f: /* movq ea, mm */
3837 if (mod != 3) {
3838 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3839 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3840 } else {
3841 rm = (modrm & 7);
3842 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3843 offsetof(CPUX86State,fpregs[reg].mmx));
3844 }
3845 break;
3846 case 0x011: /* movups */
3847 case 0x111: /* movupd */
3848 case 0x029: /* movaps */
3849 case 0x129: /* movapd */
3850 case 0x17f: /* movdqa ea, xmm */
3851 case 0x27f: /* movdqu ea, xmm */
3852 if (mod != 3) {
3853 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3854 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3855 } else {
3856 rm = (modrm & 7) | REX_B(s);
3857 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3858 offsetof(CPUX86State,xmm_regs[reg]));
3859 }
3860 break;
3861 case 0x211: /* movss ea, xmm */
3862 if (mod != 3) {
3863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3864 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3865 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3866 } else {
3867 rm = (modrm & 7) | REX_B(s);
3868 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3869 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3870 }
3871 break;
3872 case 0x311: /* movsd ea, xmm */
3873 if (mod != 3) {
3874 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3875 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3876 } else {
3877 rm = (modrm & 7) | REX_B(s);
3878 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3879 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3880 }
3881 break;
3882 case 0x013: /* movlps */
3883 case 0x113: /* movlpd */
3884 if (mod != 3) {
3885 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3886 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3887 } else {
3888 goto illegal_op;
3889 }
3890 break;
3891 case 0x017: /* movhps */
3892 case 0x117: /* movhpd */
3893 if (mod != 3) {
3894 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3895 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3896 } else {
3897 goto illegal_op;
3898 }
3899 break;
3900 case 0x71: /* shift mm, im */
3901 case 0x72:
3902 case 0x73:
3903 case 0x171: /* shift xmm, im */
3904 case 0x172:
3905 case 0x173:
3906 val = ldub_code(s->pc++);
3907 if (is_xmm) {
3908 gen_op_movl_T0_im(val);
3909 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3910 gen_op_movl_T0_0();
3911 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3912 op1_offset = offsetof(CPUX86State,xmm_t0);
3913 } else {
3914 gen_op_movl_T0_im(val);
3915 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3916 gen_op_movl_T0_0();
3917 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3918 op1_offset = offsetof(CPUX86State,mmx_t0);
3919 }
3920 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3921 if (!sse_op2)
3922 goto illegal_op;
3923 if (is_xmm) {
3924 rm = (modrm & 7) | REX_B(s);
3925 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3926 } else {
3927 rm = (modrm & 7);
3928 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3929 }
3930 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3931 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3932 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3933 break;
3934 case 0x050: /* movmskps */
3935 rm = (modrm & 7) | REX_B(s);
3936 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3937 offsetof(CPUX86State,xmm_regs[rm]));
3938 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3939 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3940 gen_op_mov_reg_T0(OT_LONG, reg);
3941 break;
3942 case 0x150: /* movmskpd */
3943 rm = (modrm & 7) | REX_B(s);
3944 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3945 offsetof(CPUX86State,xmm_regs[rm]));
3946 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3947 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3948 gen_op_mov_reg_T0(OT_LONG, reg);
3949 break;
3950 case 0x02a: /* cvtpi2ps */
3951 case 0x12a: /* cvtpi2pd */
3952 tcg_gen_helper_0_0(helper_enter_mmx);
3953 if (mod != 3) {
3954 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3955 op2_offset = offsetof(CPUX86State,mmx_t0);
3956 gen_ldq_env_A0(s->mem_index, op2_offset);
3957 } else {
3958 rm = (modrm & 7);
3959 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3960 }
3961 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3962 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3963 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3964 switch(b >> 8) {
3965 case 0x0:
3966 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3967 break;
3968 default:
3969 case 0x1:
3970 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3971 break;
3972 }
3973 break;
3974 case 0x22a: /* cvtsi2ss */
3975 case 0x32a: /* cvtsi2sd */
3976 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3977 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3978 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3979 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3980 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3981 if (ot == OT_LONG) {
3982 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3983 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3984 } else {
3985 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3986 }
3987 break;
3988 case 0x02c: /* cvttps2pi */
3989 case 0x12c: /* cvttpd2pi */
3990 case 0x02d: /* cvtps2pi */
3991 case 0x12d: /* cvtpd2pi */
3992 tcg_gen_helper_0_0(helper_enter_mmx);
3993 if (mod != 3) {
3994 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3995 op2_offset = offsetof(CPUX86State,xmm_t0);
3996 gen_ldo_env_A0(s->mem_index, op2_offset);
3997 } else {
3998 rm = (modrm & 7) | REX_B(s);
3999 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4000 }
4001 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
4002 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4003 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4004 switch(b) {
4005 case 0x02c:
4006 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
4007 break;
4008 case 0x12c:
4009 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
4010 break;
4011 case 0x02d:
4012 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
4013 break;
4014 case 0x12d:
4015 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4016 break;
4017 }
4018 break;
4019 case 0x22c: /* cvttss2si */
4020 case 0x32c: /* cvttsd2si */
4021 case 0x22d: /* cvtss2si */
4022 case 0x32d: /* cvtsd2si */
4023 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4024 if (mod != 3) {
4025 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4026 if ((b >> 8) & 1) {
4027 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4028 } else {
4029 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4030 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4031 }
4032 op2_offset = offsetof(CPUX86State,xmm_t0);
4033 } else {
4034 rm = (modrm & 7) | REX_B(s);
4035 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4036 }
4037 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4038 (b & 1) * 4];
4039 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4040 if (ot == OT_LONG) {
4041 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4042 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4043 } else {
4044 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4045 }
4046 gen_op_mov_reg_T0(ot, reg);
4047 break;
4048 case 0xc4: /* pinsrw */
4049 case 0x1c4:
4050 s->rip_offset = 1;
4051 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4052 val = ldub_code(s->pc++);
4053 if (b1) {
4054 val &= 7;
4055 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4056 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4057 } else {
4058 val &= 3;
4059 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4060 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4061 }
4062 break;
4063 case 0xc5: /* pextrw */
4064 case 0x1c5:
4065 if (mod != 3)
4066 goto illegal_op;
4067 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4068 val = ldub_code(s->pc++);
4069 if (b1) {
4070 val &= 7;
4071 rm = (modrm & 7) | REX_B(s);
4072 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4073 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4074 } else {
4075 val &= 3;
4076 rm = (modrm & 7);
4077 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4078 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4079 }
4080 reg = ((modrm >> 3) & 7) | rex_r;
4081 gen_op_mov_reg_T0(ot, reg);
4082 break;
4083 case 0x1d6: /* movq ea, xmm */
4084 if (mod != 3) {
4085 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4086 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4087 } else {
4088 rm = (modrm & 7) | REX_B(s);
4089 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4090 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4091 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4092 }
4093 break;
4094 case 0x2d6: /* movq2dq */
4095 tcg_gen_helper_0_0(helper_enter_mmx);
4096 rm = (modrm & 7);
4097 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4098 offsetof(CPUX86State,fpregs[rm].mmx));
4099 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4100 break;
4101 case 0x3d6: /* movdq2q */
4102 tcg_gen_helper_0_0(helper_enter_mmx);
4103 rm = (modrm & 7) | REX_B(s);
4104 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4105 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4106 break;
4107 case 0xd7: /* pmovmskb */
4108 case 0x1d7:
4109 if (mod != 3)
4110 goto illegal_op;
4111 if (b1) {
4112 rm = (modrm & 7) | REX_B(s);
4113 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4114 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4115 } else {
4116 rm = (modrm & 7);
4117 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4118 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4119 }
4120 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4121 reg = ((modrm >> 3) & 7) | rex_r;
4122 gen_op_mov_reg_T0(OT_LONG, reg);
4123 break;
4124 case 0x138:
4125 if (s->prefix & PREFIX_REPNZ)
4126 goto crc32;
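 /* fall through to the common three-byte 0F 38 decoding */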
4127 case 0x038:
4128 b = modrm;
4129 modrm = ldub_code(s->pc++);
4130 rm = modrm & 7;
4131 reg = ((modrm >> 3) & 7) | rex_r;
4132 mod = (modrm >> 6) & 3;
4133
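 /* Three-byte 0F 38 opcodes: the helper comes from sse_op_table6,
    guarded by the matching CPUID extension feature bit (SSSE3/SSE4). */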
4134 sse_op2 = sse_op_table6[b].op[b1];
4135 if (!sse_op2)
4136 goto illegal_op;
4137 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4138 goto illegal_op;
4139
4140 if (b1) {
4141 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4142 if (mod == 3) {
4143 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4144 } else {
4145 op2_offset = offsetof(CPUX86State,xmm_t0);
4146 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
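 /* pmovsx/pmovzx only read the part of the source they widen, so load
    8/4/2 bytes here instead of a full 16-byte access. Note that
    (s->mem_index >> 2) - 1 apparently recovers the TCG MMU index from
    the load/store op offset kept in mem_index. */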
4147 switch (b) {
4148 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4149 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4150 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4151 gen_ldq_env_A0(s->mem_index, op2_offset +
4152 offsetof(XMMReg, XMM_Q(0)));
4153 break;
4154 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4155 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4156 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
4157 (s->mem_index >> 2) - 1);
4158 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4159 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4160 offsetof(XMMReg, XMM_L(0)));
4161 break;
4162 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4163 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4164 (s->mem_index >> 2) - 1);
4165 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4166 offsetof(XMMReg, XMM_W(0)));
4167 break;
4168 case 0x2a: /* movntdqa */
4169 gen_ldo_env_A0(s->mem_index, op1_offset);
4170 return;
4171 default:
4172 gen_ldo_env_A0(s->mem_index, op2_offset);
4173 }
4174 }
4175 } else {
4176 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4177 if (mod == 3) {
4178 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4179 } else {
4180 op2_offset = offsetof(CPUX86State,mmx_t0);
4181 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4182 gen_ldq_env_A0(s->mem_index, op2_offset);
4183 }
4184 }
4185 if (sse_op2 == SSE_SPECIAL)
4186 goto illegal_op;
4187
4188 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4189 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4190 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4191
4192 if (b == 0x17)
4193 s->cc_op = CC_OP_EFLAGS;
4194 break;
4195 case 0x338: /* crc32 */
4196 crc32:
4197 b = modrm;
4198 modrm = ldub_code(s->pc++);
4199 reg = ((modrm >> 3) & 7) | rex_r;
4200
4201 if (b != 0xf0 && b != 0xf1)
4202 goto illegal_op;
4203 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4204 goto illegal_op;
4205
4206 if (b == 0xf0)
4207 ot = OT_BYTE;
4208 else if (b == 0xf1 && s->dflag != 2) {
4209 if (s->prefix & PREFIX_DATA)
4210 ot = OT_WORD;
4211 else
4212 ot = OT_LONG;
4213 } else
4214 ot = OT_QUAD;
4215
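 /* CRC32 accumulates the source operand into the 32-bit CRC held in Gv;
    the last helper argument is the operand width in bits (8 << ot). */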
4216 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4217 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4218 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4219 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4220 cpu_T[0], tcg_const_i32(8 << ot));
4221
4222 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4223 gen_op_mov_reg_T0(ot, reg);
4224 break;
4225 case 0x03a:
4226 case 0x13a:
4227 b = modrm;
4228 modrm = ldub_code(s->pc++);
4229 rm = modrm & 7;
4230 reg = ((modrm >> 3) & 7) | rex_r;
4231 mod = (modrm >> 6) & 3;
4232
4233 sse_op2 = sse_op_table7[b].op[b1];
4234 if (!sse_op2)
4235 goto illegal_op;
4236 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4237 goto illegal_op;
4238
4239 if (sse_op2 == SSE_SPECIAL) {
4240 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4241 rm = (modrm & 7) | REX_B(s);
4242 if (mod != 3)
4243 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4244 reg = ((modrm >> 3) & 7) | rex_r;
4245 val = ldub_code(s->pc++);
4246 switch (b) {
4247 case 0x14: /* pextrb */
4248 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4249 xmm_regs[reg].XMM_B(val & 15)));
4250 if (mod == 3)
4251 gen_op_mov_reg_T0(ot, rm);
4252 else
4253 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4254 (s->mem_index >> 2) - 1);
4255 break;
4256 case 0x15: /* pextrw */
4257 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4258 xmm_regs[reg].XMM_W(val & 7)));
4259 if (mod == 3)
4260 gen_op_mov_reg_T0(ot, rm);
4261 else
4262 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4263 (s->mem_index >> 2) - 1);
4264 break;
4265 case 0x16:
4266 if (ot == OT_LONG) { /* pextrd */
4267 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4268 offsetof(CPUX86State,
4269 xmm_regs[reg].XMM_L(val & 3)));
4270 if (mod == 3)
4271 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4272 else
4273 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4274 (s->mem_index >> 2) - 1);
4275 } else { /* pextrq */
4276 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4277 offsetof(CPUX86State,
4278 xmm_regs[reg].XMM_Q(val & 1)));
4279 if (mod == 3)
4280 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4281 else
4282 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4283 (s->mem_index >> 2) - 1);
4284 }
4285 break;
4286 case 0x17: /* extractps */
4287 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4288 xmm_regs[reg].XMM_L(val & 3)));
4289 if (mod == 3)
4290 gen_op_mov_reg_T0(ot, rm);
4291 else
4292 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4293 (s->mem_index >> 2) - 1);
4294 break;
4295 case 0x20: /* pinsrb */
4296 if (mod == 3)
4297 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4298 else
4299 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4300 (s->mem_index >> 2) - 1);
4301 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4302 xmm_regs[reg].XMM_B(val & 15)));
4303 break;
4304 case 0x21: /* insertps */
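 /* insertps imm8: bits 7:6 select the source dword, bits 5:4 the
    destination dword, and bits 3:0 form a zero mask applied afterwards. */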
4305 if (mod == 3)
4306 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4307 offsetof(CPUX86State,xmm_regs[rm]
4308 .XMM_L((val >> 6) & 3)));
4309 else
4310 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4311 (s->mem_index >> 2) - 1);
4312 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4313 offsetof(CPUX86State,xmm_regs[reg]
4314 .XMM_L((val >> 4) & 3)));
4315 if ((val >> 0) & 1)
4316 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4317 cpu_env, offsetof(CPUX86State,
4318 xmm_regs[reg].XMM_L(0)));
4319 if ((val >> 1) & 1)
4320 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4321 cpu_env, offsetof(CPUX86State,
4322 xmm_regs[reg].XMM_L(1)));
4323 if ((val >> 2) & 1)
4324 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4325 cpu_env, offsetof(CPUX86State,
4326 xmm_regs[reg].XMM_L(2)));
4327 if ((val >> 3) & 1)
4328 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4329 cpu_env, offsetof(CPUX86State,
4330 xmm_regs[reg].XMM_L(3)));
4331 break;
4332 case 0x22:
4333 if (ot == OT_LONG) { /* pinsrd */
4334 if (mod == 3)
4335 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4336 else
4337 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4338 (s->mem_index >> 2) - 1);
4339 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4340 offsetof(CPUX86State,
4341 xmm_regs[reg].XMM_L(val & 3)));
4342 } else { /* pinsrq */
4343 if (mod == 3)
4344 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4345 else
4346 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4347 (s->mem_index >> 2) - 1);
4348 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4349 offsetof(CPUX86State,
4350 xmm_regs[reg].XMM_Q(val & 1)));
4351 }
4352 break;
4353 }
4354 return;
4355 }
4356
4357 if (b1) {
4358 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4359 if (mod == 3) {
4360 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4361 } else {
4362 op2_offset = offsetof(CPUX86State,xmm_t0);
4363 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4364 gen_ldo_env_A0(s->mem_index, op2_offset);
4365 }
4366 } else {
4367 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4368 if (mod == 3) {
4369 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4370 } else {
4371 op2_offset = offsetof(CPUX86State,mmx_t0);
4372 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4373 gen_ldq_env_A0(s->mem_index, op2_offset);
4374 }
4375 }
4376 val = ldub_code(s->pc++);
4377
4378 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4379 s->cc_op = CC_OP_EFLAGS;
4380
4381 if (s->dflag == 2)
4382 /* The helper must use the entire 64-bit GP registers */
4383 val |= 1 << 8;
4384 }
4385
4386 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4387 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4388 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4389 break;
4390 default:
4391 goto illegal_op;
4392 }
4393 } else {
4394 /* generic MMX or SSE operation */
4395 switch(b) {
4396 case 0x70: /* pshufx insn */
4397 case 0xc6: /* pshufx insn */
4398 case 0xc2: /* compare insns */
4399 s->rip_offset = 1;
4400 break;
4401 default:
4402 break;
4403 }
4404 if (is_xmm) {
4405 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4406 if (mod != 3) {
4407 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4408 op2_offset = offsetof(CPUX86State,xmm_t0);
4409 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4410 b == 0xc2)) {
4411 /* specific case for scalar SSE instructions: load only the low 32/64 bits */
4412 if (b1 == 2) {
4413 /* 32 bit access */
4414 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4415 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4416 } else {
4417 /* 64 bit access */
4418 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4419 }
4420 } else {
4421 gen_ldo_env_A0(s->mem_index, op2_offset);
4422 }
4423 } else {
4424 rm = (modrm & 7) | REX_B(s);
4425 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4426 }
4427 } else {
4428 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4429 if (mod != 3) {
4430 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4431 op2_offset = offsetof(CPUX86State,mmx_t0);
4432 gen_ldq_env_A0(s->mem_index, op2_offset);
4433 } else {
4434 rm = (modrm & 7);
4435 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4436 }
4437 }
4438 switch(b) {
4439 case 0x0f: /* 3DNow! data insns */
4440 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4441 goto illegal_op;
4442 val = ldub_code(s->pc++);
4443 sse_op2 = sse_op_table5[val];
4444 if (!sse_op2)
4445 goto illegal_op;
4446 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4447 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4448 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4449 break;
4450 case 0x70: /* pshufx insn */
4451 case 0xc6: /* pshufx insn */
4452 val = ldub_code(s->pc++);
4453 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4454 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4455 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4456 break;
4457 case 0xc2:
4458 /* compare insns */
4459 val = ldub_code(s->pc++);
4460 if (val >= 8)
4461 goto illegal_op;
4462 sse_op2 = sse_op_table4[val][b1];
4463 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4464 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4465 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4466 break;
4467 case 0xf7:
4468 /* maskmov: we must prepare A0 */
4469 if (mod != 3)
4470 goto illegal_op;
4471#ifdef TARGET_X86_64
4472 if (s->aflag == 2) {
4473 gen_op_movq_A0_reg(R_EDI);
4474 } else
4475#endif
4476 {
4477 gen_op_movl_A0_reg(R_EDI);
4478 if (s->aflag == 0)
4479 gen_op_andl_A0_ffff();
4480 }
4481 gen_add_A0_ds_seg(s);
4482
4483 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4484 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4485 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4486 break;
4487 default:
4488 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4489 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4490 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4491 break;
4492 }
4493 if (b == 0x2e || b == 0x2f) {
4494 s->cc_op = CC_OP_EFLAGS;
4495 }
4496 }
4497}
4498
4499#ifdef VBOX
4500/* Checks whether this is an invalid lock sequence. Only a few instructions
4501 can be used together with the lock prefix, and of those only the forms
4502 that write to a memory operand. So this is rather tedious work
4503 to check...
4504 The AMD manual lists the following instructions.
4505 ADC
4506 ADD
4507 AND
4508 BTC
4509 BTR
4510 BTS
4511 CMPXCHG
4512 CMPXCHG8B
4513 CMPXCHG16B
4514 DEC
4515 INC
4516 NEG
4517 NOT
4518 OR
4519 SBB
4520 SUB
4521 XADD
4522 XCHG
4523 XOR */
4524static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4525{
4526 target_ulong pc = s->pc;
4527 int modrm, mod, op;
4528
4529 /* X={8,16,32,64} Y={16,32,64} */
4530 switch (b)
4531 {
4532 /* /2: ADC reg/memX, immX */
4533 /* /0: ADD reg/memX, immX */
4534 /* /4: AND reg/memX, immX */
4535 /* /1: OR reg/memX, immX */
4536 /* /3: SBB reg/memX, immX */
4537 /* /5: SUB reg/memX, immX */
4538 /* /6: XOR reg/memX, immX */
4539 case 0x80:
4540 case 0x81:
4541 case 0x83:
4542 modrm = ldub_code(pc++);
4543 op = (modrm >> 3) & 7;
4544 if (op == 7) /* /7: CMP */
4545 break;
4546 mod = (modrm >> 6) & 3;
4547 if (mod == 3) /* register destination */
4548 break;
4549 return false;
4550
4551 case 0x10: /* /r: ADC reg/mem8, reg8 */
4552 case 0x11: /* /r: ADC reg/memX, regY */
4553 case 0x00: /* /r: ADD reg/mem8, reg8 */
4554 case 0x01: /* /r: ADD reg/memX, regY */
4555 case 0x20: /* /r: AND reg/mem8, reg8 */
4556 case 0x21: /* /r: AND reg/memY, regY */
4557 case 0x08: /* /r: OR reg/mem8, reg8 */
4558 case 0x09: /* /r: OR reg/memY, regY */
4559 case 0x18: /* /r: SBB reg/mem8, reg8 */
4560 case 0x19: /* /r: SBB reg/memY, regY */
4561 case 0x28: /* /r: SUB reg/mem8, reg8 */
4562 case 0x29: /* /r: SUB reg/memY, regY */
4563 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4564 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4565 case 0x30: /* /r: XOR reg/mem8, reg8 */
4566 case 0x31: /* /r: XOR reg/memY, regY */
4567 modrm = ldub_code(pc++);
4568 mod = (modrm >> 6) & 3;
4569 if (mod == 3) /* register destination */
4570 break;
4571 return false;
4572
4573 /* /1: DEC reg/memX */
4574 /* /0: INC reg/memX */
4575 case 0xfe:
4576 case 0xff:
4577 modrm = ldub_code(pc++);
4578 mod = (modrm >> 6) & 3;
4579 if (mod == 3) /* register destination */
4580 break;
4581 return false;
4582
4583 /* /3: NEG reg/memX */
4584 /* /2: NOT reg/memX */
4585 case 0xf6:
4586 case 0xf7:
4587 modrm = ldub_code(pc++);
4588 mod = (modrm >> 6) & 3;
4589 if (mod == 3) /* register destination */
4590 break;
4591 return false;
4592
4593 case 0x0f:
4594 b = ldub_code(pc++);
4595 switch (b)
4596 {
4597 /* /7: BTC reg/memY, imm8 */
4598 /* /6: BTR reg/memY, imm8 */
4599 /* /5: BTS reg/memY, imm8 */
4600 case 0xba:
4601 modrm = ldub_code(pc++);
4602 op = (modrm >> 3) & 7;
4603 if (op < 5)
4604 break;
4605 mod = (modrm >> 6) & 3;
4606 if (mod == 3) /* register destination */
4607 break;
4608 return false;
4609
4610 case 0xbb: /* /r: BTC reg/memY, regY */
4611 case 0xb3: /* /r: BTR reg/memY, regY */
4612 case 0xab: /* /r: BTS reg/memY, regY */
4613 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4614 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4615 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4616 case 0xc1: /* /r: XADD reg/memY, regY */
4617 modrm = ldub_code(pc++);
4618 mod = (modrm >> 6) & 3;
4619 if (mod == 3) /* register destination */
4620 break;
4621 return false;
4622
4623 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4624 case 0xc7:
4625 modrm = ldub_code(pc++);
4626 op = (modrm >> 3) & 7;
4627 if (op != 1)
4628 break;
4629 return false;
4630 }
4631 break;
4632 }
4633
4634 /* illegal sequence. The s->pc is past the lock prefix and that
4635 is sufficient for the TB, I think. */
4636 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4637 return true;
4638}
4639#endif /* VBOX */
4640
4641
4642/* Convert one instruction. s->is_jmp is set if the translation must
4643 be stopped. Returns the next pc value. */
4644static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4645{
4646 int b, prefixes, aflag, dflag;
4647 int shift, ot;
4648 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4649 target_ulong next_eip, tval;
4650 int rex_w, rex_r;
4651
4652 if (unlikely(loglevel & CPU_LOG_TB_OP))
4653 tcg_gen_debug_insn_start(pc_start);
4654
4655 s->pc = pc_start;
4656 prefixes = 0;
4657 aflag = s->code32;
4658 dflag = s->code32;
4659 s->override = -1;
4660 rex_w = -1;
4661 rex_r = 0;
4662#ifdef TARGET_X86_64
4663 s->rex_x = 0;
4664 s->rex_b = 0;
4665 x86_64_hregs = 0;
4666#endif
4667 s->rip_offset = 0; /* for relative ip address */
4668#ifdef VBOX
4669 /* nike: seems to only slow things down */
4670# if 0
4671 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4672
4673 gen_update_eip(pc_start - s->cs_base);
4674# endif
4675#endif
4676
4677 next_byte:
4678 b = ldub_code(s->pc);
4679 s->pc++;
4680 /* check prefixes */
4681#ifdef TARGET_X86_64
4682 if (CODE64(s)) {
4683 switch (b) {
4684 case 0xf3:
4685 prefixes |= PREFIX_REPZ;
4686 goto next_byte;
4687 case 0xf2:
4688 prefixes |= PREFIX_REPNZ;
4689 goto next_byte;
4690 case 0xf0:
4691 prefixes |= PREFIX_LOCK;
4692 goto next_byte;
4693 case 0x2e:
4694 s->override = R_CS;
4695 goto next_byte;
4696 case 0x36:
4697 s->override = R_SS;
4698 goto next_byte;
4699 case 0x3e:
4700 s->override = R_DS;
4701 goto next_byte;
4702 case 0x26:
4703 s->override = R_ES;
4704 goto next_byte;
4705 case 0x64:
4706 s->override = R_FS;
4707 goto next_byte;
4708 case 0x65:
4709 s->override = R_GS;
4710 goto next_byte;
4711 case 0x66:
4712 prefixes |= PREFIX_DATA;
4713 goto next_byte;
4714 case 0x67:
4715 prefixes |= PREFIX_ADR;
4716 goto next_byte;
4717 case 0x40 ... 0x4f:
4718 /* REX prefix */
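 /* REX is 0100WRXB: W widens the operand size to 64 bit, while R, X
    and B are pre-shifted here to become bit 3 of ModRM.reg, SIB.index
    and ModRM.rm/SIB.base respectively. */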
4719 rex_w = (b >> 3) & 1;
4720 rex_r = (b & 0x4) << 1;
4721 s->rex_x = (b & 0x2) << 2;
4722 REX_B(s) = (b & 0x1) << 3;
4723 x86_64_hregs = 1; /* select uniform byte register addressing */
4724 goto next_byte;
4725 }
4726 if (rex_w == 1) {
4727 /* 0x66 is ignored if rex.w is set */
4728 dflag = 2;
4729 } else {
4730 if (prefixes & PREFIX_DATA)
4731 dflag ^= 1;
4732 }
4733 if (!(prefixes & PREFIX_ADR))
4734 aflag = 2;
4735 } else
4736#endif
4737 {
4738 switch (b) {
4739 case 0xf3:
4740 prefixes |= PREFIX_REPZ;
4741 goto next_byte;
4742 case 0xf2:
4743 prefixes |= PREFIX_REPNZ;
4744 goto next_byte;
4745 case 0xf0:
4746 prefixes |= PREFIX_LOCK;
4747 goto next_byte;
4748 case 0x2e:
4749 s->override = R_CS;
4750 goto next_byte;
4751 case 0x36:
4752 s->override = R_SS;
4753 goto next_byte;
4754 case 0x3e:
4755 s->override = R_DS;
4756 goto next_byte;
4757 case 0x26:
4758 s->override = R_ES;
4759 goto next_byte;
4760 case 0x64:
4761 s->override = R_FS;
4762 goto next_byte;
4763 case 0x65:
4764 s->override = R_GS;
4765 goto next_byte;
4766 case 0x66:
4767 prefixes |= PREFIX_DATA;
4768 goto next_byte;
4769 case 0x67:
4770 prefixes |= PREFIX_ADR;
4771 goto next_byte;
4772 }
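 /* outside long mode, 0x66/0x67 simply toggle the default
    operand/address size between 16 and 32 bit */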
4773 if (prefixes & PREFIX_DATA)
4774 dflag ^= 1;
4775 if (prefixes & PREFIX_ADR)
4776 aflag ^= 1;
4777 }
4778
4779 s->prefix = prefixes;
4780 s->aflag = aflag;
4781 s->dflag = dflag;
4782
4783 /* lock generation */
4784#ifndef VBOX
4785 if (prefixes & PREFIX_LOCK)
4786 tcg_gen_helper_0_0(helper_lock);
4787#else /* VBOX */
4788 if (prefixes & PREFIX_LOCK) {
4789 if (is_invalid_lock_sequence(s, pc_start, b)) {
4790 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4791 return s->pc;
4792 }
4793 tcg_gen_helper_0_0(helper_lock);
4794 }
4795#endif /* VBOX */
4796
4797 /* now check op code */
4798 reswitch:
4799 switch(b) {
4800 case 0x0f:
4801 /**************************/
4802 /* extended op code */
4803 b = ldub_code(s->pc++) | 0x100;
4804 goto reswitch;
4805
4806 /**************************/
4807 /* arith & logic */
4808 case 0x00 ... 0x05:
4809 case 0x08 ... 0x0d:
4810 case 0x10 ... 0x15:
4811 case 0x18 ... 0x1d:
4812 case 0x20 ... 0x25:
4813 case 0x28 ... 0x2d:
4814 case 0x30 ... 0x35:
4815 case 0x38 ... 0x3d:
4816 {
4817 int op, f, val;
4818 op = (b >> 3) & 7;
4819 f = (b >> 1) & 3;
4820
4821 if ((b & 1) == 0)
4822 ot = OT_BYTE;
4823 else
4824 ot = dflag + OT_WORD;
4825
4826 switch(f) {
4827 case 0: /* OP Ev, Gv */
4828 modrm = ldub_code(s->pc++);
4829 reg = ((modrm >> 3) & 7) | rex_r;
4830 mod = (modrm >> 6) & 3;
4831 rm = (modrm & 7) | REX_B(s);
4832 if (mod != 3) {
4833 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4834 opreg = OR_TMP0;
4835 } else if (op == OP_XORL && rm == reg) {
4836 xor_zero:
4837 /* xor reg, reg optimisation */
4838 gen_op_movl_T0_0();
4839 s->cc_op = CC_OP_LOGICB + ot;
4840 gen_op_mov_reg_T0(ot, reg);
4841 gen_op_update1_cc();
4842 break;
4843 } else {
4844 opreg = rm;
4845 }
4846 gen_op_mov_TN_reg(ot, 1, reg);
4847 gen_op(s, op, ot, opreg);
4848 break;
4849 case 1: /* OP Gv, Ev */
4850 modrm = ldub_code(s->pc++);
4851 mod = (modrm >> 6) & 3;
4852 reg = ((modrm >> 3) & 7) | rex_r;
4853 rm = (modrm & 7) | REX_B(s);
4854 if (mod != 3) {
4855 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4856 gen_op_ld_T1_A0(ot + s->mem_index);
4857 } else if (op == OP_XORL && rm == reg) {
4858 goto xor_zero;
4859 } else {
4860 gen_op_mov_TN_reg(ot, 1, rm);
4861 }
4862 gen_op(s, op, ot, reg);
4863 break;
4864 case 2: /* OP A, Iv */
4865 val = insn_get(s, ot);
4866 gen_op_movl_T1_im(val);
4867 gen_op(s, op, ot, OR_EAX);
4868 break;
4869 }
4870 }
4871 break;
4872
4873 case 0x82:
4874 if (CODE64(s))
4875 goto illegal_op;
4876 case 0x80: /* GRP1 */
4877 case 0x81:
4878 case 0x83:
4879 {
4880 int val;
4881
4882 if ((b & 1) == 0)
4883 ot = OT_BYTE;
4884 else
4885 ot = dflag + OT_WORD;
4886
4887 modrm = ldub_code(s->pc++);
4888 mod = (modrm >> 6) & 3;
4889 rm = (modrm & 7) | REX_B(s);
4890 op = (modrm >> 3) & 7;
4891
4892 if (mod != 3) {
4893 if (b == 0x83)
4894 s->rip_offset = 1;
4895 else
4896 s->rip_offset = insn_const_size(ot);
4897 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4898 opreg = OR_TMP0;
4899 } else {
4900 opreg = rm;
4901 }
4902
4903 switch(b) {
4904 default:
4905 case 0x80:
4906 case 0x81:
4907 case 0x82:
4908 val = insn_get(s, ot);
4909 break;
4910 case 0x83:
4911 val = (int8_t)insn_get(s, OT_BYTE);
4912 break;
4913 }
4914 gen_op_movl_T1_im(val);
4915 gen_op(s, op, ot, opreg);
4916 }
4917 break;
4918
4919 /**************************/
4920 /* inc, dec, and other misc arith */
4921 case 0x40 ... 0x47: /* inc Gv */
4922 ot = dflag ? OT_LONG : OT_WORD;
4923 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4924 break;
4925 case 0x48 ... 0x4f: /* dec Gv */
4926 ot = dflag ? OT_LONG : OT_WORD;
4927 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4928 break;
4929 case 0xf6: /* GRP3 */
4930 case 0xf7:
4931 if ((b & 1) == 0)
4932 ot = OT_BYTE;
4933 else
4934 ot = dflag + OT_WORD;
4935
4936 modrm = ldub_code(s->pc++);
4937 mod = (modrm >> 6) & 3;
4938 rm = (modrm & 7) | REX_B(s);
4939 op = (modrm >> 3) & 7;
4940 if (mod != 3) {
4941 if (op == 0)
4942 s->rip_offset = insn_const_size(ot);
4943 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4944 gen_op_ld_T0_A0(ot + s->mem_index);
4945 } else {
4946 gen_op_mov_TN_reg(ot, 0, rm);
4947 }
4948
4949 switch(op) {
4950 case 0: /* test */
4951 val = insn_get(s, ot);
4952 gen_op_movl_T1_im(val);
4953 gen_op_testl_T0_T1_cc();
4954 s->cc_op = CC_OP_LOGICB + ot;
4955 break;
4956 case 2: /* not */
4957 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4958 if (mod != 3) {
4959 gen_op_st_T0_A0(ot + s->mem_index);
4960 } else {
4961 gen_op_mov_reg_T0(ot, rm);
4962 }
4963 break;
4964 case 3: /* neg */
4965 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4966 if (mod != 3) {
4967 gen_op_st_T0_A0(ot + s->mem_index);
4968 } else {
4969 gen_op_mov_reg_T0(ot, rm);
4970 }
4971 gen_op_update_neg_cc();
4972 s->cc_op = CC_OP_SUBB + ot;
4973 break;
4974 case 4: /* mul */
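 /* MUL: the low half of the product goes to the destination and cc_dst,
    the high half to cc_src, so the CC_OP_MUL* flag computation can set
    CF/OF when cc_src is non-zero. */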
4975 switch(ot) {
4976 case OT_BYTE:
4977 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4978 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4979 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4980 /* XXX: use 32 bit mul which could be faster */
4981 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4982 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4983 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4984 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4985 s->cc_op = CC_OP_MULB;
4986 break;
4987 case OT_WORD:
4988 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4989 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4990 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4991 /* XXX: use 32 bit mul which could be faster */
4992 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4993 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4994 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4995 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4996 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4997 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4998 s->cc_op = CC_OP_MULW;
4999 break;
5000 default:
5001 case OT_LONG:
5002#ifdef TARGET_X86_64
5003 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5004 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
5005 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
5006 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5007 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5008 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5009 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5010 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5011 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5012#else
5013 {
5014 TCGv t0, t1;
5015 t0 = tcg_temp_new(TCG_TYPE_I64);
5016 t1 = tcg_temp_new(TCG_TYPE_I64);
5017 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5018 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5019 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5020 tcg_gen_mul_i64(t0, t0, t1);
5021 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5022 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5023 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5024 tcg_gen_shri_i64(t0, t0, 32);
5025 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5026 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5027 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5028 }
5029#endif
5030 s->cc_op = CC_OP_MULL;
5031 break;
5032#ifdef TARGET_X86_64
5033 case OT_QUAD:
5034 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5035 s->cc_op = CC_OP_MULQ;
5036 break;
5037#endif
5038 }
5039 break;
5040 case 5: /* imul */
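 /* IMUL: cc_src is the high half minus the sign-extension of the low
    half, i.e. non-zero exactly when the signed product overflowed;
    CC_OP_MUL* turns that into CF/OF. */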
5041 switch(ot) {
5042 case OT_BYTE:
5043 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5044 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5045 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5046 /* XXX: use 32 bit mul which could be faster */
5047 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5048 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5049 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5050 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5051 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5052 s->cc_op = CC_OP_MULB;
5053 break;
5054 case OT_WORD:
5055 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5056 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5057 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5058 /* XXX: use 32 bit mul which could be faster */
5059 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5060 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5061 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5062 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5063 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5064 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5065 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5066 s->cc_op = CC_OP_MULW;
5067 break;
5068 default:
5069 case OT_LONG:
5070#ifdef TARGET_X86_64
5071 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5072 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5073 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5074 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5075 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5076 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5077 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5078 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5079 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5080 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5081#else
5082 {
5083 TCGv t0, t1;
5084 t0 = tcg_temp_new(TCG_TYPE_I64);
5085 t1 = tcg_temp_new(TCG_TYPE_I64);
5086 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5087 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5088 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5089 tcg_gen_mul_i64(t0, t0, t1);
5090 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5091 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5092 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5093 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5094 tcg_gen_shri_i64(t0, t0, 32);
5095 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5096 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5097 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5098 }
5099#endif
5100 s->cc_op = CC_OP_MULL;
5101 break;
5102#ifdef TARGET_X86_64
5103 case OT_QUAD:
5104 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5105 s->cc_op = CC_OP_MULQ;
5106 break;
5107#endif
5108 }
5109 break;
5110 case 6: /* div */
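 /* DIV/IDIV may raise #DE, so EIP is synced to the start of the
    instruction before each helper call. */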
5111 switch(ot) {
5112 case OT_BYTE:
5113 gen_jmp_im(pc_start - s->cs_base);
5114 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5115 break;
5116 case OT_WORD:
5117 gen_jmp_im(pc_start - s->cs_base);
5118 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5119 break;
5120 default:
5121 case OT_LONG:
5122 gen_jmp_im(pc_start - s->cs_base);
5123 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5124 break;
5125#ifdef TARGET_X86_64
5126 case OT_QUAD:
5127 gen_jmp_im(pc_start - s->cs_base);
5128 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5129 break;
5130#endif
5131 }
5132 break;
5133 case 7: /* idiv */
5134 switch(ot) {
5135 case OT_BYTE:
5136 gen_jmp_im(pc_start - s->cs_base);
5137 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5138 break;
5139 case OT_WORD:
5140 gen_jmp_im(pc_start - s->cs_base);
5141 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5142 break;
5143 default:
5144 case OT_LONG:
5145 gen_jmp_im(pc_start - s->cs_base);
5146 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5147 break;
5148#ifdef TARGET_X86_64
5149 case OT_QUAD:
5150 gen_jmp_im(pc_start - s->cs_base);
5151 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5152 break;
5153#endif
5154 }
5155 break;
5156 default:
5157 goto illegal_op;
5158 }
5159 break;
5160
5161 case 0xfe: /* GRP4 */
5162 case 0xff: /* GRP5 */
5163 if ((b & 1) == 0)
5164 ot = OT_BYTE;
5165 else
5166 ot = dflag + OT_WORD;
5167
5168 modrm = ldub_code(s->pc++);
5169 mod = (modrm >> 6) & 3;
5170 rm = (modrm & 7) | REX_B(s);
5171 op = (modrm >> 3) & 7;
5172 if (op >= 2 && b == 0xfe) {
5173 goto illegal_op;
5174 }
5175 if (CODE64(s)) {
5176 if (op == 2 || op == 4) {
5177 /* operand size for jumps is 64 bit */
5178 ot = OT_QUAD;
5179 } else if (op == 3 || op == 5) {
5180 /* for far calls and jumps (lcall/ljmp), the operand is
5181 16 or 32 bit, even in long mode */
5182 ot = dflag ? OT_LONG : OT_WORD;
5183 } else if (op == 6) {
5184 /* default push size is 64 bit */
5185 ot = dflag ? OT_QUAD : OT_WORD;
5186 }
5187 }
5188 if (mod != 3) {
5189 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5190 if (op >= 2 && op != 3 && op != 5)
5191 gen_op_ld_T0_A0(ot + s->mem_index);
5192 } else {
5193 gen_op_mov_TN_reg(ot, 0, rm);
5194 }
5195
5196 switch(op) {
5197 case 0: /* inc Ev */
5198 if (mod != 3)
5199 opreg = OR_TMP0;
5200 else
5201 opreg = rm;
5202 gen_inc(s, ot, opreg, 1);
5203 break;
5204 case 1: /* dec Ev */
5205 if (mod != 3)
5206 opreg = OR_TMP0;
5207 else
5208 opreg = rm;
5209 gen_inc(s, ot, opreg, -1);
5210 break;
5211 case 2: /* call Ev */
5212 /* XXX: optimize if memory (no 'and' is necessary) */
5213#ifdef VBOX_WITH_CALL_RECORD
5214 if (s->record_call)
5215 gen_op_record_call();
5216#endif
5217 if (s->dflag == 0)
5218 gen_op_andl_T0_ffff();
5219 next_eip = s->pc - s->cs_base;
5220 gen_movtl_T1_im(next_eip);
5221 gen_push_T1(s);
5222 gen_op_jmp_T0();
5223 gen_eob(s);
5224 break;
5225 case 3: /* lcall Ev */
5226 gen_op_ld_T1_A0(ot + s->mem_index);
5227 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5228 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5229 do_lcall:
5230 if (s->pe && !s->vm86) {
5231 if (s->cc_op != CC_OP_DYNAMIC)
5232 gen_op_set_cc_op(s->cc_op);
5233 gen_jmp_im(pc_start - s->cs_base);
5234 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5235 tcg_gen_helper_0_4(helper_lcall_protected,
5236 cpu_tmp2_i32, cpu_T[1],
5237 tcg_const_i32(dflag),
5238 tcg_const_i32(s->pc - pc_start));
5239 } else {
5240 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5241 tcg_gen_helper_0_4(helper_lcall_real,
5242 cpu_tmp2_i32, cpu_T[1],
5243 tcg_const_i32(dflag),
5244 tcg_const_i32(s->pc - s->cs_base));
5245 }
5246 gen_eob(s);
5247 break;
5248 case 4: /* jmp Ev */
5249 if (s->dflag == 0)
5250 gen_op_andl_T0_ffff();
5251 gen_op_jmp_T0();
5252 gen_eob(s);
5253 break;
5254 case 5: /* ljmp Ev */
5255 gen_op_ld_T1_A0(ot + s->mem_index);
5256 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5257 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5258 do_ljmp:
5259 if (s->pe && !s->vm86) {
5260 if (s->cc_op != CC_OP_DYNAMIC)
5261 gen_op_set_cc_op(s->cc_op);
5262 gen_jmp_im(pc_start - s->cs_base);
5263 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5264 tcg_gen_helper_0_3(helper_ljmp_protected,
5265 cpu_tmp2_i32,
5266 cpu_T[1],
5267 tcg_const_i32(s->pc - pc_start));
5268 } else {
5269 gen_op_movl_seg_T0_vm(R_CS);
5270 gen_op_movl_T0_T1();
5271 gen_op_jmp_T0();
5272 }
5273 gen_eob(s);
5274 break;
5275 case 6: /* push Ev */
5276 gen_push_T0(s);
5277 break;
5278 default:
5279 goto illegal_op;
5280 }
5281 break;
5282
5283 case 0x84: /* test Ev, Gv */
5284 case 0x85:
5285 if ((b & 1) == 0)
5286 ot = OT_BYTE;
5287 else
5288 ot = dflag + OT_WORD;
5289
5290 modrm = ldub_code(s->pc++);
5291 mod = (modrm >> 6) & 3;
5292 rm = (modrm & 7) | REX_B(s);
5293 reg = ((modrm >> 3) & 7) | rex_r;
5294
5295 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5296 gen_op_mov_TN_reg(ot, 1, reg);
5297 gen_op_testl_T0_T1_cc();
5298 s->cc_op = CC_OP_LOGICB + ot;
5299 break;
5300
5301 case 0xa8: /* test eAX, Iv */
5302 case 0xa9:
5303 if ((b & 1) == 0)
5304 ot = OT_BYTE;
5305 else
5306 ot = dflag + OT_WORD;
5307 val = insn_get(s, ot);
5308
5309 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5310 gen_op_movl_T1_im(val);
5311 gen_op_testl_T0_T1_cc();
5312 s->cc_op = CC_OP_LOGICB + ot;
5313 break;
5314
5315 case 0x98: /* CWDE/CBW */
5316#ifdef TARGET_X86_64
5317 if (dflag == 2) {
5318 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5319 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5320 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5321 } else
5322#endif
5323 if (dflag == 1) {
5324 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5325 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5326 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5327 } else {
5328 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5329 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5330 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5331 }
5332 break;
5333 case 0x99: /* CDQ/CWD */
5334#ifdef TARGET_X86_64
5335 if (dflag == 2) {
5336 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5337 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5338 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5339 } else
5340#endif
5341 if (dflag == 1) {
5342 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5343 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5344 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5345 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5346 } else {
5347 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5348 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5349 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5350 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5351 }
5352 break;
5353 case 0x1af: /* imul Gv, Ev */
5354 case 0x69: /* imul Gv, Ev, I */
5355 case 0x6b:
5356 ot = dflag + OT_WORD;
5357 modrm = ldub_code(s->pc++);
5358 reg = ((modrm >> 3) & 7) | rex_r;
5359 if (b == 0x69)
5360 s->rip_offset = insn_const_size(ot);
5361 else if (b == 0x6b)
5362 s->rip_offset = 1;
5363 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5364 if (b == 0x69) {
5365 val = insn_get(s, ot);
5366 gen_op_movl_T1_im(val);
5367 } else if (b == 0x6b) {
5368 val = (int8_t)insn_get(s, OT_BYTE);
5369 gen_op_movl_T1_im(val);
5370 } else {
5371 gen_op_mov_TN_reg(ot, 1, reg);
5372 }
5373
5374#ifdef TARGET_X86_64
5375 if (ot == OT_QUAD) {
5376 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5377 } else
5378#endif
5379 if (ot == OT_LONG) {
5380#ifdef TARGET_X86_64
5381 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5382 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5383 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5384 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5385 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5386 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5387#else
5388 {
5389 TCGv t0, t1;
5390 t0 = tcg_temp_new(TCG_TYPE_I64);
5391 t1 = tcg_temp_new(TCG_TYPE_I64);
5392 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5393 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5394 tcg_gen_mul_i64(t0, t0, t1);
5395 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5396 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5397 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5398 tcg_gen_shri_i64(t0, t0, 32);
5399 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5400 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5401 }
5402#endif
5403 } else {
5404 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5405 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5406 /* XXX: use 32 bit mul which could be faster */
5407 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5408 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5409 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5410 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5411 }
5412 gen_op_mov_reg_T0(ot, reg);
5413 s->cc_op = CC_OP_MULB + ot;
5414 break;
5415 case 0x1c0:
5416 case 0x1c1: /* xadd Ev, Gv */
5417 if ((b & 1) == 0)
5418 ot = OT_BYTE;
5419 else
5420 ot = dflag + OT_WORD;
5421 modrm = ldub_code(s->pc++);
5422 reg = ((modrm >> 3) & 7) | rex_r;
5423 mod = (modrm >> 6) & 3;
5424 if (mod == 3) {
5425 rm = (modrm & 7) | REX_B(s);
5426 gen_op_mov_TN_reg(ot, 0, reg);
5427 gen_op_mov_TN_reg(ot, 1, rm);
5428 gen_op_addl_T0_T1();
5429 gen_op_mov_reg_T1(ot, reg);
5430 gen_op_mov_reg_T0(ot, rm);
5431 } else {
5432 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5433 gen_op_mov_TN_reg(ot, 0, reg);
5434 gen_op_ld_T1_A0(ot + s->mem_index);
5435 gen_op_addl_T0_T1();
5436 gen_op_st_T0_A0(ot + s->mem_index);
5437 gen_op_mov_reg_T1(ot, reg);
5438 }
5439 gen_op_update2_cc();
5440 s->cc_op = CC_OP_ADDB + ot;
5441 break;
5442 case 0x1b0:
5443 case 0x1b1: /* cmpxchg Ev, Gv */
5444 {
5445 int label1, label2;
5446 TCGv t0, t1, t2, a0;
5447
5448 if ((b & 1) == 0)
5449 ot = OT_BYTE;
5450 else
5451 ot = dflag + OT_WORD;
5452 modrm = ldub_code(s->pc++);
5453 reg = ((modrm >> 3) & 7) | rex_r;
5454 mod = (modrm >> 6) & 3;
5455 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5456 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5457 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5458 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5459 gen_op_mov_v_reg(ot, t1, reg);
5460 if (mod == 3) {
5461 rm = (modrm & 7) | REX_B(s);
5462 gen_op_mov_v_reg(ot, t0, rm);
5463 } else {
5464 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5465 tcg_gen_mov_tl(a0, cpu_A0);
5466 gen_op_ld_v(ot + s->mem_index, t0, a0);
5467 rm = 0; /* avoid warning */
5468 }
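 /* compare EAX with the destination: on match the source (t1) is kept,
    on mismatch t1 is first replaced by the old value and EAX reloaded.
    The memory form always performs the store, matching the unconditional
    write cycle of the real instruction. */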
5469 label1 = gen_new_label();
5470 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5471 tcg_gen_sub_tl(t2, t2, t0);
5472 gen_extu(ot, t2);
5473 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5474 if (mod == 3) {
5475 label2 = gen_new_label();
5476 gen_op_mov_reg_v(ot, R_EAX, t0);
5477 tcg_gen_br(label2);
5478 gen_set_label(label1);
5479 gen_op_mov_reg_v(ot, rm, t1);
5480 gen_set_label(label2);
5481 } else {
5482 tcg_gen_mov_tl(t1, t0);
5483 gen_op_mov_reg_v(ot, R_EAX, t0);
5484 gen_set_label(label1);
5485 /* always store */
5486 gen_op_st_v(ot + s->mem_index, t1, a0);
5487 }
5488 tcg_gen_mov_tl(cpu_cc_src, t0);
5489 tcg_gen_mov_tl(cpu_cc_dst, t2);
5490 s->cc_op = CC_OP_SUBB + ot;
5491 tcg_temp_free(t0);
5492 tcg_temp_free(t1);
5493 tcg_temp_free(t2);
5494 tcg_temp_free(a0);
5495 }
5496 break;
5497 case 0x1c7: /* cmpxchg8b */
5498 modrm = ldub_code(s->pc++);
5499 mod = (modrm >> 6) & 3;
5500 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5501 goto illegal_op;
5502#ifdef TARGET_X86_64
5503 if (dflag == 2) {
5504 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5505 goto illegal_op;
5506 gen_jmp_im(pc_start - s->cs_base);
5507 if (s->cc_op != CC_OP_DYNAMIC)
5508 gen_op_set_cc_op(s->cc_op);
5509 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5510 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5511 } else
5512#endif
5513 {
5514 if (!(s->cpuid_features & CPUID_CX8))
5515 goto illegal_op;
5516 gen_jmp_im(pc_start - s->cs_base);
5517 if (s->cc_op != CC_OP_DYNAMIC)
5518 gen_op_set_cc_op(s->cc_op);
5519 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5520 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5521 }
5522 s->cc_op = CC_OP_EFLAGS;
5523 break;
5524
5525 /**************************/
5526 /* push/pop */
5527 case 0x50 ... 0x57: /* push */
5528 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5529 gen_push_T0(s);
5530 break;
5531 case 0x58 ... 0x5f: /* pop */
5532 if (CODE64(s)) {
5533 ot = dflag ? OT_QUAD : OT_WORD;
5534 } else {
5535 ot = dflag + OT_WORD;
5536 }
5537 gen_pop_T0(s);
5538 /* NOTE: order is important for pop %sp */
5539 gen_pop_update(s);
5540 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5541 break;
5542 case 0x60: /* pusha */
5543 if (CODE64(s))
5544 goto illegal_op;
5545 gen_pusha(s);
5546 break;
5547 case 0x61: /* popa */
5548 if (CODE64(s))
5549 goto illegal_op;
5550 gen_popa(s);
5551 break;
5552 case 0x68: /* push Iv */
5553 case 0x6a:
5554 if (CODE64(s)) {
5555 ot = dflag ? OT_QUAD : OT_WORD;
5556 } else {
5557 ot = dflag + OT_WORD;
5558 }
5559 if (b == 0x68)
5560 val = insn_get(s, ot);
5561 else
5562 val = (int8_t)insn_get(s, OT_BYTE);
5563 gen_op_movl_T0_im(val);
5564 gen_push_T0(s);
5565 break;
5566 case 0x8f: /* pop Ev */
5567 if (CODE64(s)) {
5568 ot = dflag ? OT_QUAD : OT_WORD;
5569 } else {
5570 ot = dflag + OT_WORD;
5571 }
5572 modrm = ldub_code(s->pc++);
5573 mod = (modrm >> 6) & 3;
5574 gen_pop_T0(s);
5575 if (mod == 3) {
5576 /* NOTE: order is important for pop %sp */
5577 gen_pop_update(s);
5578 rm = (modrm & 7) | REX_B(s);
5579 gen_op_mov_reg_T0(ot, rm);
5580 } else {
5581 /* NOTE: order is important too for MMU exceptions */
5582 s->popl_esp_hack = 1 << ot;
5583 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5584 s->popl_esp_hack = 0;
5585 gen_pop_update(s);
5586 }
5587 break;
5588 case 0xc8: /* enter */
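 /* ENTER imm16, imm8: frame size and lexical nesting level */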
5589 {
5590 int level;
5591 val = lduw_code(s->pc);
5592 s->pc += 2;
5593 level = ldub_code(s->pc++);
5594 gen_enter(s, val, level);
5595 }
5596 break;
5597 case 0xc9: /* leave */
5598 /* XXX: exception not precise (ESP is updated before potential exception) */
5599 if (CODE64(s)) {
5600 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5601 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5602 } else if (s->ss32) {
5603 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5604 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5605 } else {
5606 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5607 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5608 }
5609 gen_pop_T0(s);
5610 if (CODE64(s)) {
5611 ot = dflag ? OT_QUAD : OT_WORD;
5612 } else {
5613 ot = dflag + OT_WORD;
5614 }
5615 gen_op_mov_reg_T0(ot, R_EBP);
5616 gen_pop_update(s);
5617 break;
5618 case 0x06: /* push es */
5619 case 0x0e: /* push cs */
5620 case 0x16: /* push ss */
5621 case 0x1e: /* push ds */
5622 if (CODE64(s))
5623 goto illegal_op;
5624 gen_op_movl_T0_seg(b >> 3);
5625 gen_push_T0(s);
5626 break;
5627 case 0x1a0: /* push fs */
5628 case 0x1a8: /* push gs */
5629 gen_op_movl_T0_seg((b >> 3) & 7);
5630 gen_push_T0(s);
5631 break;
5632 case 0x07: /* pop es */
5633 case 0x17: /* pop ss */
5634 case 0x1f: /* pop ds */
5635 if (CODE64(s))
5636 goto illegal_op;
5637 reg = b >> 3;
5638 gen_pop_T0(s);
5639 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5640 gen_pop_update(s);
5641 if (reg == R_SS) {
5642 /* if reg == SS, inhibit interrupts/trace. */
5643 /* If several instructions disable interrupts, only the
5644 _first_ does it */
5645 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5646 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5647 s->tf = 0;
5648 }
5649 if (s->is_jmp) {
5650 gen_jmp_im(s->pc - s->cs_base);
5651 gen_eob(s);
5652 }
5653 break;
5654 case 0x1a1: /* pop fs */
5655 case 0x1a9: /* pop gs */
5656 gen_pop_T0(s);
5657 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5658 gen_pop_update(s);
5659 if (s->is_jmp) {
5660 gen_jmp_im(s->pc - s->cs_base);
5661 gen_eob(s);
5662 }
5663 break;
5664
5665 /**************************/
5666 /* mov */
5667 case 0x88:
5668 case 0x89: /* mov Gv, Ev */
5669 if ((b & 1) == 0)
5670 ot = OT_BYTE;
5671 else
5672 ot = dflag + OT_WORD;
5673 modrm = ldub_code(s->pc++);
5674 reg = ((modrm >> 3) & 7) | rex_r;
5675
5676 /* generate a generic store */
5677 gen_ldst_modrm(s, modrm, ot, reg, 1);
5678 break;
5679 case 0xc6:
5680 case 0xc7: /* mov Ev, Iv */
5681 if ((b & 1) == 0)
5682 ot = OT_BYTE;
5683 else
5684 ot = dflag + OT_WORD;
5685 modrm = ldub_code(s->pc++);
5686 mod = (modrm >> 6) & 3;
5687 if (mod != 3) {
5688 s->rip_offset = insn_const_size(ot);
5689 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5690 }
5691 val = insn_get(s, ot);
5692 gen_op_movl_T0_im(val);
5693 if (mod != 3)
5694 gen_op_st_T0_A0(ot + s->mem_index);
5695 else
5696 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5697 break;
5698 case 0x8a:
5699 case 0x8b: /* mov Ev, Gv */
5700#ifdef VBOX /* dtrace hot fix */
5701 if (prefixes & PREFIX_LOCK)
5702 goto illegal_op;
5703#endif
5704 if ((b & 1) == 0)
5705 ot = OT_BYTE;
5706 else
5707 ot = OT_WORD + dflag;
5708 modrm = ldub_code(s->pc++);
5709 reg = ((modrm >> 3) & 7) | rex_r;
5710
5711 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5712 gen_op_mov_reg_T0(ot, reg);
5713 break;
5714 case 0x8e: /* mov seg, Gv */
5715 modrm = ldub_code(s->pc++);
5716 reg = (modrm >> 3) & 7;
5717 if (reg >= 6 || reg == R_CS)
5718 goto illegal_op;
5719 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5720 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5721 if (reg == R_SS) {
5722 /* if reg == SS, inhibit interrupts/trace */
5723 /* If several instructions disable interrupts, only the
5724 _first_ does it */
5725 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5726 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5727 s->tf = 0;
5728 }
5729 if (s->is_jmp) {
5730 gen_jmp_im(s->pc - s->cs_base);
5731 gen_eob(s);
5732 }
5733 break;
5734 case 0x8c: /* mov Gv, seg */
5735 modrm = ldub_code(s->pc++);
5736 reg = (modrm >> 3) & 7;
5737 mod = (modrm >> 6) & 3;
5738 if (reg >= 6)
5739 goto illegal_op;
5740 gen_op_movl_T0_seg(reg);
5741 if (mod == 3)
5742 ot = OT_WORD + dflag;
5743 else
5744 ot = OT_WORD;
5745 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5746 break;
5747
5748 case 0x1b6: /* movzbS Gv, Eb */
5749 case 0x1b7: /* movzwS Gv, Ew */
5750 case 0x1be: /* movsbS Gv, Eb */
5751 case 0x1bf: /* movswS Gv, Ew */
5752 {
5753 int d_ot;
5754 /* d_ot is the size of destination */
5755 d_ot = dflag + OT_WORD;
5756 /* ot is the size of source */
5757 ot = (b & 1) + OT_BYTE;
5758 modrm = ldub_code(s->pc++);
5759 reg = ((modrm >> 3) & 7) | rex_r;
5760 mod = (modrm >> 6) & 3;
5761 rm = (modrm & 7) | REX_B(s);
5762
5763 if (mod == 3) {
5764 gen_op_mov_TN_reg(ot, 0, rm);
5765 switch(ot | (b & 8)) {
5766 case OT_BYTE:
5767 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5768 break;
5769 case OT_BYTE | 8:
5770 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5771 break;
5772 case OT_WORD:
5773 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5774 break;
5775 default:
5776 case OT_WORD | 8:
5777 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5778 break;
5779 }
5780 gen_op_mov_reg_T0(d_ot, reg);
5781 } else {
5782 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5783 if (b & 8) {
5784 gen_op_lds_T0_A0(ot + s->mem_index);
5785 } else {
5786 gen_op_ldu_T0_A0(ot + s->mem_index);
5787 }
5788 gen_op_mov_reg_T0(d_ot, reg);
5789 }
5790 }
5791 break;
5792
5793 case 0x8d: /* lea */
5794 ot = dflag + OT_WORD;
5795 modrm = ldub_code(s->pc++);
5796 mod = (modrm >> 6) & 3;
5797 if (mod == 3)
5798 goto illegal_op;
5799 reg = ((modrm >> 3) & 7) | rex_r;
5800 /* we must ensure that no segment is added */
5801 s->override = -1;
5802 val = s->addseg;
5803 s->addseg = 0;
5804 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5805 s->addseg = val;
5806 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5807 break;
5808
5809 case 0xa0: /* mov EAX, Ov */
5810 case 0xa1:
5811 case 0xa2: /* mov Ov, EAX */
5812 case 0xa3:
5813 {
5814 target_ulong offset_addr;
5815
5816 if ((b & 1) == 0)
5817 ot = OT_BYTE;
5818 else
5819 ot = dflag + OT_WORD;
5820#ifdef TARGET_X86_64
5821 if (s->aflag == 2) {
5822 offset_addr = ldq_code(s->pc);
5823 s->pc += 8;
5824 gen_op_movq_A0_im(offset_addr);
5825 } else
5826#endif
5827 {
5828 if (s->aflag) {
5829 offset_addr = insn_get(s, OT_LONG);
5830 } else {
5831 offset_addr = insn_get(s, OT_WORD);
5832 }
5833 gen_op_movl_A0_im(offset_addr);
5834 }
5835 gen_add_A0_ds_seg(s);
5836 if ((b & 2) == 0) {
5837 gen_op_ld_T0_A0(ot + s->mem_index);
5838 gen_op_mov_reg_T0(ot, R_EAX);
5839 } else {
5840 gen_op_mov_TN_reg(ot, 0, R_EAX);
5841 gen_op_st_T0_A0(ot + s->mem_index);
5842 }
5843 }
5844 break;
5845 case 0xd7: /* xlat */
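 /* XLAT: AL = [seg:(E)BX + unsigned AL] */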
5846#ifdef TARGET_X86_64
5847 if (s->aflag == 2) {
5848 gen_op_movq_A0_reg(R_EBX);
5849 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5850 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5851 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5852 } else
5853#endif
5854 {
5855 gen_op_movl_A0_reg(R_EBX);
5856 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5857 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5858 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5859 if (s->aflag == 0)
5860 gen_op_andl_A0_ffff();
5861 else
5862 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5863 }
5864 gen_add_A0_ds_seg(s);
5865 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5866 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5867 break;
5868 case 0xb0 ... 0xb7: /* mov R, Ib */
5869 val = insn_get(s, OT_BYTE);
5870 gen_op_movl_T0_im(val);
5871 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5872 break;
5873 case 0xb8 ... 0xbf: /* mov R, Iv */
5874#ifdef TARGET_X86_64
5875 if (dflag == 2) {
5876 uint64_t tmp;
5877 /* 64 bit case */
5878 tmp = ldq_code(s->pc);
5879 s->pc += 8;
5880 reg = (b & 7) | REX_B(s);
5881 gen_movtl_T0_im(tmp);
5882 gen_op_mov_reg_T0(OT_QUAD, reg);
5883 } else
5884#endif
5885 {
5886 ot = dflag ? OT_LONG : OT_WORD;
5887 val = insn_get(s, ot);
5888 reg = (b & 7) | REX_B(s);
5889 gen_op_movl_T0_im(val);
5890 gen_op_mov_reg_T0(ot, reg);
5891 }
5892 break;
5893
5894 case 0x91 ... 0x97: /* xchg R, EAX */
5895 ot = dflag + OT_WORD;
5896 reg = (b & 7) | REX_B(s);
5897 rm = R_EAX;
5898 goto do_xchg_reg;
5899 case 0x86:
5900 case 0x87: /* xchg Ev, Gv */
5901 if ((b & 1) == 0)
5902 ot = OT_BYTE;
5903 else
5904 ot = dflag + OT_WORD;
5905 modrm = ldub_code(s->pc++);
5906 reg = ((modrm >> 3) & 7) | rex_r;
5907 mod = (modrm >> 6) & 3;
5908 if (mod == 3) {
5909 rm = (modrm & 7) | REX_B(s);
5910 do_xchg_reg:
5911 gen_op_mov_TN_reg(ot, 0, reg);
5912 gen_op_mov_TN_reg(ot, 1, rm);
5913 gen_op_mov_reg_T0(ot, rm);
5914 gen_op_mov_reg_T1(ot, reg);
5915 } else {
5916 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5917 gen_op_mov_TN_reg(ot, 0, reg);
5918 /* for xchg, lock is implicit */
5919 if (!(prefixes & PREFIX_LOCK))
5920 tcg_gen_helper_0_0(helper_lock);
5921 gen_op_ld_T1_A0(ot + s->mem_index);
5922 gen_op_st_T0_A0(ot + s->mem_index);
5923 if (!(prefixes & PREFIX_LOCK))
5924 tcg_gen_helper_0_0(helper_unlock);
5925 gen_op_mov_reg_T1(ot, reg);
5926 }
5927 break;
5928 case 0xc4: /* les Gv */
5929 if (CODE64(s))
5930 goto illegal_op;
5931 op = R_ES;
5932 goto do_lxx;
5933 case 0xc5: /* lds Gv */
5934 if (CODE64(s))
5935 goto illegal_op;
5936 op = R_DS;
5937 goto do_lxx;
5938 case 0x1b2: /* lss Gv */
5939 op = R_SS;
5940 goto do_lxx;
5941 case 0x1b4: /* lfs Gv */
5942 op = R_FS;
5943 goto do_lxx;
5944 case 0x1b5: /* lgs Gv */
5945 op = R_GS;
5946 do_lxx:
5947 ot = dflag ? OT_LONG : OT_WORD;
5948 modrm = ldub_code(s->pc++);
5949 reg = ((modrm >> 3) & 7) | rex_r;
5950 mod = (modrm >> 6) & 3;
5951 if (mod == 3)
5952 goto illegal_op;
5953 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5954 gen_op_ld_T1_A0(ot + s->mem_index);
5955 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5956 /* load the segment first to handle exceptions properly */
5957 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5958 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5959 /* then put the data */
5960 gen_op_mov_reg_T1(ot, reg);
5961 if (s->is_jmp) {
5962 gen_jmp_im(s->pc - s->cs_base);
5963 gen_eob(s);
5964 }
5965 break;
5966
5967 /************************/
5968 /* shifts */
5969 case 0xc0:
5970 case 0xc1:
5971 /* shift Ev,Ib */
5972 shift = 2;
5973 grp2:
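 /* shift dispatch: shift == 0 takes the count from CL, 1 is a constant
    count of one, 2 reads an imm8 after the ModRM byte (hence the
    rip_offset below). */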
5974 {
5975 if ((b & 1) == 0)
5976 ot = OT_BYTE;
5977 else
5978 ot = dflag + OT_WORD;
5979
5980 modrm = ldub_code(s->pc++);
5981 mod = (modrm >> 6) & 3;
5982 op = (modrm >> 3) & 7;
5983
5984 if (mod != 3) {
5985 if (shift == 2) {
5986 s->rip_offset = 1;
5987 }
5988 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5989 opreg = OR_TMP0;
5990 } else {
5991 opreg = (modrm & 7) | REX_B(s);
5992 }
5993
5994 /* simpler op */
5995 if (shift == 0) {
5996 gen_shift(s, op, ot, opreg, OR_ECX);
5997 } else {
5998 if (shift == 2) {
5999 shift = ldub_code(s->pc++);
6000 }
6001 gen_shifti(s, op, ot, opreg, shift);
6002 }
6003 }
6004 break;
6005 case 0xd0:
6006 case 0xd1:
6007 /* shift Ev,1 */
6008 shift = 1;
6009 goto grp2;
6010 case 0xd2:
6011 case 0xd3:
6012 /* shift Ev,cl */
6013 shift = 0;
6014 goto grp2;
6015
6016 case 0x1a4: /* shld imm */
6017 op = 0;
6018 shift = 1;
6019 goto do_shiftd;
6020 case 0x1a5: /* shld cl */
6021 op = 0;
6022 shift = 0;
6023 goto do_shiftd;
6024 case 0x1ac: /* shrd imm */
6025 op = 1;
6026 shift = 1;
6027 goto do_shiftd;
6028 case 0x1ad: /* shrd cl */
6029 op = 1;
6030 shift = 0;
6031 do_shiftd:
6032 ot = dflag + OT_WORD;
6033 modrm = ldub_code(s->pc++);
6034 mod = (modrm >> 6) & 3;
6035 rm = (modrm & 7) | REX_B(s);
6036 reg = ((modrm >> 3) & 7) | rex_r;
6037 if (mod != 3) {
6038 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6039 opreg = OR_TMP0;
6040 } else {
6041 opreg = rm;
6042 }
6043 gen_op_mov_TN_reg(ot, 1, reg);
6044
6045 if (shift) {
6046 val = ldub_code(s->pc++);
6047 tcg_gen_movi_tl(cpu_T3, val);
6048 } else {
6049 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6050 }
6051 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6052 break;
6053
6054 /************************/
6055 /* floats */
6056 case 0xd8 ... 0xdf:
6057 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6058 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6059 /* XXX: what to do if illegal op ? */
6060 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6061 break;
6062 }
6063 modrm = ldub_code(s->pc++);
6064 mod = (modrm >> 6) & 3;
6065 rm = modrm & 7;
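 /* fold the low opcode bits and ModRM.reg into a 6-bit x87 operation
    index: ((b & 7) << 3) | reg */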
6066 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
6067 if (mod != 3) {
6068 /* memory op */
6069 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6070 switch(op) {
6071 case 0x00 ... 0x07: /* fxxxs */
6072 case 0x10 ... 0x17: /* fixxxl */
6073 case 0x20 ... 0x27: /* fxxxl */
6074 case 0x30 ... 0x37: /* fixxx */
6075 {
6076 int op1;
6077 op1 = op & 7;
6078
6079 switch(op >> 4) {
6080 case 0:
6081 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6082 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6083 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6084 break;
6085 case 1:
6086 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6087 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6088 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6089 break;
6090 case 2:
6091 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6092 (s->mem_index >> 2) - 1);
6093 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6094 break;
6095 case 3:
6096 default:
6097 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6098 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6099 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6100 break;
6101 }
6102
6103 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6104 if (op1 == 3) {
6105 /* fcomp needs pop */
6106 tcg_gen_helper_0_0(helper_fpop);
6107 }
6108 }
6109 break;
6110 case 0x08: /* flds */
6111 case 0x0a: /* fsts */
6112 case 0x0b: /* fstps */
6113 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6114 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6115 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6116 switch(op & 7) {
6117 case 0:
6118 switch(op >> 4) {
6119 case 0:
6120 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6121 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6122 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6123 break;
6124 case 1:
6125 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6126 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6127 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6128 break;
6129 case 2:
6130 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6131 (s->mem_index >> 2) - 1);
6132 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6133 break;
6134 case 3:
6135 default:
6136 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6137 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6138 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6139 break;
6140 }
6141 break;
6142 case 1:
6143 /* XXX: the corresponding CPUID bit must be tested ! */
6144 switch(op >> 4) {
6145 case 1:
6146 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6147 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6148 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6149 break;
6150 case 2:
6151 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6152 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6153 (s->mem_index >> 2) - 1);
6154 break;
6155 case 3:
6156 default:
6157 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6158 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6159 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6160 break;
6161 }
6162 tcg_gen_helper_0_0(helper_fpop);
6163 break;
6164 default:
6165 switch(op >> 4) {
6166 case 0:
6167 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6168 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6169 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6170 break;
6171 case 1:
6172 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6173 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6174 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6175 break;
6176 case 2:
6177 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6178 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6179 (s->mem_index >> 2) - 1);
6180 break;
6181 case 3:
6182 default:
6183 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6184 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6185 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6186 break;
6187 }
6188 if ((op & 7) == 3)
6189 tcg_gen_helper_0_0(helper_fpop);
6190 break;
6191 }
6192 break;
6193 case 0x0c: /* fldenv mem */
6194 if (s->cc_op != CC_OP_DYNAMIC)
6195 gen_op_set_cc_op(s->cc_op);
6196 gen_jmp_im(pc_start - s->cs_base);
6197 tcg_gen_helper_0_2(helper_fldenv,
6198 cpu_A0, tcg_const_i32(s->dflag));
6199 break;
6200 case 0x0d: /* fldcw mem */
6201 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6202 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6203 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6204 break;
6205 case 0x0e: /* fnstenv mem */
6206 if (s->cc_op != CC_OP_DYNAMIC)
6207 gen_op_set_cc_op(s->cc_op);
6208 gen_jmp_im(pc_start - s->cs_base);
6209 tcg_gen_helper_0_2(helper_fstenv,
6210 cpu_A0, tcg_const_i32(s->dflag));
6211 break;
6212 case 0x0f: /* fnstcw mem */
6213 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6214 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6215 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6216 break;
6217 case 0x1d: /* fldt mem */
6218 if (s->cc_op != CC_OP_DYNAMIC)
6219 gen_op_set_cc_op(s->cc_op);
6220 gen_jmp_im(pc_start - s->cs_base);
6221 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6222 break;
6223 case 0x1f: /* fstpt mem */
6224 if (s->cc_op != CC_OP_DYNAMIC)
6225 gen_op_set_cc_op(s->cc_op);
6226 gen_jmp_im(pc_start - s->cs_base);
6227 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6228 tcg_gen_helper_0_0(helper_fpop);
6229 break;
6230 case 0x2c: /* frstor mem */
6231 if (s->cc_op != CC_OP_DYNAMIC)
6232 gen_op_set_cc_op(s->cc_op);
6233 gen_jmp_im(pc_start - s->cs_base);
6234 tcg_gen_helper_0_2(helper_frstor,
6235 cpu_A0, tcg_const_i32(s->dflag));
6236 break;
6237 case 0x2e: /* fnsave mem */
6238 if (s->cc_op != CC_OP_DYNAMIC)
6239 gen_op_set_cc_op(s->cc_op);
6240 gen_jmp_im(pc_start - s->cs_base);
6241 tcg_gen_helper_0_2(helper_fsave,
6242 cpu_A0, tcg_const_i32(s->dflag));
6243 break;
6244 case 0x2f: /* fnstsw mem */
6245 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6246 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6247 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6248 break;
6249 case 0x3c: /* fbld */
6250 if (s->cc_op != CC_OP_DYNAMIC)
6251 gen_op_set_cc_op(s->cc_op);
6252 gen_jmp_im(pc_start - s->cs_base);
6253 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6254 break;
6255 case 0x3e: /* fbstp */
6256 if (s->cc_op != CC_OP_DYNAMIC)
6257 gen_op_set_cc_op(s->cc_op);
6258 gen_jmp_im(pc_start - s->cs_base);
6259 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6260 tcg_gen_helper_0_0(helper_fpop);
6261 break;
6262 case 0x3d: /* fildll */
6263 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6264 (s->mem_index >> 2) - 1);
6265 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6266 break;
6267 case 0x3f: /* fistpll */
6268 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6269 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6270 (s->mem_index >> 2) - 1);
6271 tcg_gen_helper_0_0(helper_fpop);
6272 break;
6273 default:
6274 goto illegal_op;
6275 }
6276 } else {
6277 /* register float ops */
6278 opreg = rm;
6279
6280 switch(op) {
6281 case 0x08: /* fld sti */
6282 tcg_gen_helper_0_0(helper_fpush);
6283 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6284 break;
6285 case 0x09: /* fxchg sti */
6286 case 0x29: /* fxchg4 sti, undocumented op */
6287 case 0x39: /* fxchg7 sti, undocumented op */
6288 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6289 break;
6290 case 0x0a: /* grp d9/2 */
6291 switch(rm) {
6292 case 0: /* fnop */
6293 /* check exceptions (FreeBSD FPU probe) */
6294 if (s->cc_op != CC_OP_DYNAMIC)
6295 gen_op_set_cc_op(s->cc_op);
6296 gen_jmp_im(pc_start - s->cs_base);
6297 tcg_gen_helper_0_0(helper_fwait);
6298 break;
6299 default:
6300 goto illegal_op;
6301 }
6302 break;
6303 case 0x0c: /* grp d9/4 */
6304 switch(rm) {
6305 case 0: /* fchs */
6306 tcg_gen_helper_0_0(helper_fchs_ST0);
6307 break;
6308 case 1: /* fabs */
6309 tcg_gen_helper_0_0(helper_fabs_ST0);
6310 break;
6311 case 4: /* ftst */
6312 tcg_gen_helper_0_0(helper_fldz_FT0);
6313 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6314 break;
6315 case 5: /* fxam */
6316 tcg_gen_helper_0_0(helper_fxam_ST0);
6317 break;
6318 default:
6319 goto illegal_op;
6320 }
6321 break;
6322 case 0x0d: /* grp d9/5 */
6323 {
6324 switch(rm) {
6325 case 0:
6326 tcg_gen_helper_0_0(helper_fpush);
6327 tcg_gen_helper_0_0(helper_fld1_ST0);
6328 break;
6329 case 1:
6330 tcg_gen_helper_0_0(helper_fpush);
6331 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6332 break;
6333 case 2:
6334 tcg_gen_helper_0_0(helper_fpush);
6335 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6336 break;
6337 case 3:
6338 tcg_gen_helper_0_0(helper_fpush);
6339 tcg_gen_helper_0_0(helper_fldpi_ST0);
6340 break;
6341 case 4:
6342 tcg_gen_helper_0_0(helper_fpush);
6343 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6344 break;
6345 case 5:
6346 tcg_gen_helper_0_0(helper_fpush);
6347 tcg_gen_helper_0_0(helper_fldln2_ST0);
6348 break;
6349 case 6:
6350 tcg_gen_helper_0_0(helper_fpush);
6351 tcg_gen_helper_0_0(helper_fldz_ST0);
6352 break;
6353 default:
6354 goto illegal_op;
6355 }
6356 }
6357 break;
6358 case 0x0e: /* grp d9/6 */
6359 switch(rm) {
6360 case 0: /* f2xm1 */
6361 tcg_gen_helper_0_0(helper_f2xm1);
6362 break;
6363 case 1: /* fyl2x */
6364 tcg_gen_helper_0_0(helper_fyl2x);
6365 break;
6366 case 2: /* fptan */
6367 tcg_gen_helper_0_0(helper_fptan);
6368 break;
6369 case 3: /* fpatan */
6370 tcg_gen_helper_0_0(helper_fpatan);
6371 break;
6372 case 4: /* fxtract */
6373 tcg_gen_helper_0_0(helper_fxtract);
6374 break;
6375 case 5: /* fprem1 */
6376 tcg_gen_helper_0_0(helper_fprem1);
6377 break;
6378 case 6: /* fdecstp */
6379 tcg_gen_helper_0_0(helper_fdecstp);
6380 break;
6381 default:
6382 case 7: /* fincstp */
6383 tcg_gen_helper_0_0(helper_fincstp);
6384 break;
6385 }
6386 break;
6387 case 0x0f: /* grp d9/7 */
6388 switch(rm) {
6389 case 0: /* fprem */
6390 tcg_gen_helper_0_0(helper_fprem);
6391 break;
6392 case 1: /* fyl2xp1 */
6393 tcg_gen_helper_0_0(helper_fyl2xp1);
6394 break;
6395 case 2: /* fsqrt */
6396 tcg_gen_helper_0_0(helper_fsqrt);
6397 break;
6398 case 3: /* fsincos */
6399 tcg_gen_helper_0_0(helper_fsincos);
6400 break;
6401 case 5: /* fscale */
6402 tcg_gen_helper_0_0(helper_fscale);
6403 break;
6404 case 4: /* frndint */
6405 tcg_gen_helper_0_0(helper_frndint);
6406 break;
6407 case 6: /* fsin */
6408 tcg_gen_helper_0_0(helper_fsin);
6409 break;
6410 default:
6411 case 7: /* fcos */
6412 tcg_gen_helper_0_0(helper_fcos);
6413 break;
6414 }
6415 break;
6416 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6417 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6418 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6419 {
6420 int op1;
6421
6422 op1 = op & 7;
6423 if (op >= 0x20) {
6424 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6425 if (op >= 0x30)
6426 tcg_gen_helper_0_0(helper_fpop);
6427 } else {
6428 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6429 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6430 }
6431 }
6432 break;
6433 case 0x02: /* fcom */
6434 case 0x22: /* fcom2, undocumented op */
6435 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6436 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6437 break;
6438 case 0x03: /* fcomp */
6439 case 0x23: /* fcomp3, undocumented op */
6440 case 0x32: /* fcomp5, undocumented op */
6441 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6442 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6443 tcg_gen_helper_0_0(helper_fpop);
6444 break;
6445 case 0x15: /* da/5 */
6446 switch(rm) {
6447 case 1: /* fucompp */
6448 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6449 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6450 tcg_gen_helper_0_0(helper_fpop);
6451 tcg_gen_helper_0_0(helper_fpop);
6452 break;
6453 default:
6454 goto illegal_op;
6455 }
6456 break;
6457 case 0x1c:
6458 switch(rm) {
6459 case 0: /* feni (287 only, just do nop here) */
6460 break;
6461 case 1: /* fdisi (287 only, just do nop here) */
6462 break;
6463 case 2: /* fclex */
6464 tcg_gen_helper_0_0(helper_fclex);
6465 break;
6466 case 3: /* fninit */
6467 tcg_gen_helper_0_0(helper_fninit);
6468 break;
6469 case 4: /* fsetpm (287 only, just do nop here) */
6470 break;
6471 default:
6472 goto illegal_op;
6473 }
6474 break;
6475 case 0x1d: /* fucomi */
6476 if (s->cc_op != CC_OP_DYNAMIC)
6477 gen_op_set_cc_op(s->cc_op);
6478 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6479 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6480 s->cc_op = CC_OP_EFLAGS;
6481 break;
6482 case 0x1e: /* fcomi */
6483 if (s->cc_op != CC_OP_DYNAMIC)
6484 gen_op_set_cc_op(s->cc_op);
6485 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6486 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6487 s->cc_op = CC_OP_EFLAGS;
6488 break;
6489 case 0x28: /* ffree sti */
6490 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6491 break;
6492 case 0x2a: /* fst sti */
6493 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6494 break;
6495 case 0x2b: /* fstp sti */
6496 case 0x0b: /* fstp1 sti, undocumented op */
6497 case 0x3a: /* fstp8 sti, undocumented op */
6498 case 0x3b: /* fstp9 sti, undocumented op */
6499 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6500 tcg_gen_helper_0_0(helper_fpop);
6501 break;
6502 case 0x2c: /* fucom st(i) */
6503 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6504 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6505 break;
6506 case 0x2d: /* fucomp st(i) */
6507 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6508 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6509 tcg_gen_helper_0_0(helper_fpop);
6510 break;
6511 case 0x33: /* de/3 */
6512 switch(rm) {
6513 case 1: /* fcompp */
6514 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6515 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6516 tcg_gen_helper_0_0(helper_fpop);
6517 tcg_gen_helper_0_0(helper_fpop);
6518 break;
6519 default:
6520 goto illegal_op;
6521 }
6522 break;
6523 case 0x38: /* ffreep sti, undocumented op */
6524 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6525 tcg_gen_helper_0_0(helper_fpop);
6526 break;
6527 case 0x3c: /* df/4 */
6528 switch(rm) {
6529 case 0:
6530 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6531 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6532 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6533 break;
6534 default:
6535 goto illegal_op;
6536 }
6537 break;
6538 case 0x3d: /* fucomip */
6539 if (s->cc_op != CC_OP_DYNAMIC)
6540 gen_op_set_cc_op(s->cc_op);
6541 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6542 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6543 tcg_gen_helper_0_0(helper_fpop);
6544 s->cc_op = CC_OP_EFLAGS;
6545 break;
6546 case 0x3e: /* fcomip */
6547 if (s->cc_op != CC_OP_DYNAMIC)
6548 gen_op_set_cc_op(s->cc_op);
6549 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6550 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6551 tcg_gen_helper_0_0(helper_fpop);
6552 s->cc_op = CC_OP_EFLAGS;
6553 break;
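                /* fcmovcc: fcmov_cc maps the condition to a jcc code
                   for B/Z/BE/P; op1 is the *inverted* condition, so the
                   branch skips the fmov when the move must not
                   happen. */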
6554 case 0x10 ... 0x13: /* fcmovxx */
6555 case 0x18 ... 0x1b:
6556 {
6557 int op1, l1;
6558 static const uint8_t fcmov_cc[8] = {
6559 (JCC_B << 1),
6560 (JCC_Z << 1),
6561 (JCC_BE << 1),
6562 (JCC_P << 1),
6563 };
6564 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6565 l1 = gen_new_label();
6566 gen_jcc1(s, s->cc_op, op1, l1);
6567 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6568 gen_set_label(l1);
6569 }
6570 break;
6571 default:
6572 goto illegal_op;
6573 }
6574 }
6575 break;
6576 /************************/
6577 /* string ops */
6578
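        /* String instructions: with a REPZ/REPNZ prefix the gen_repz_*
           generators emit a loop decrementing *CX (and testing ZF for
           scas/cmps); without a prefix a single iteration is emitted
           inline. */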
6579 case 0xa4: /* movsS */
6580 case 0xa5:
6581 if ((b & 1) == 0)
6582 ot = OT_BYTE;
6583 else
6584 ot = dflag + OT_WORD;
6585
6586 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6587 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6588 } else {
6589 gen_movs(s, ot);
6590 }
6591 break;
6592
6593 case 0xaa: /* stosS */
6594 case 0xab:
6595 if ((b & 1) == 0)
6596 ot = OT_BYTE;
6597 else
6598 ot = dflag + OT_WORD;
6599
6600 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6601 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6602 } else {
6603 gen_stos(s, ot);
6604 }
6605 break;
6606 case 0xac: /* lodsS */
6607 case 0xad:
6608 if ((b & 1) == 0)
6609 ot = OT_BYTE;
6610 else
6611 ot = dflag + OT_WORD;
6612 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6613 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6614 } else {
6615 gen_lods(s, ot);
6616 }
6617 break;
6618 case 0xae: /* scasS */
6619 case 0xaf:
6620 if ((b & 1) == 0)
6621 ot = OT_BYTE;
6622 else
6623 ot = dflag + OT_WORD;
6624 if (prefixes & PREFIX_REPNZ) {
6625 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6626 } else if (prefixes & PREFIX_REPZ) {
6627 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6628 } else {
6629 gen_scas(s, ot);
6630 s->cc_op = CC_OP_SUBB + ot;
6631 }
6632 break;
6633
6634 case 0xa6: /* cmpsS */
6635 case 0xa7:
6636 if ((b & 1) == 0)
6637 ot = OT_BYTE;
6638 else
6639 ot = dflag + OT_WORD;
6640 if (prefixes & PREFIX_REPNZ) {
6641 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6642 } else if (prefixes & PREFIX_REPZ) {
6643 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6644 } else {
6645 gen_cmps(s, ot);
6646 s->cc_op = CC_OP_SUBB + ot;
6647 }
6648 break;
6649 case 0x6c: /* insS */
6650 case 0x6d:
6651 if ((b & 1) == 0)
6652 ot = OT_BYTE;
6653 else
6654 ot = dflag ? OT_LONG : OT_WORD;
6655 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6656 gen_op_andl_T0_ffff();
6657 gen_check_io(s, ot, pc_start - s->cs_base,
6658 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6659 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6660 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6661 } else {
6662 gen_ins(s, ot);
6663 if (use_icount) {
6664 gen_jmp(s, s->pc - s->cs_base);
6665 }
6666 }
6667 break;
6668 case 0x6e: /* outsS */
6669 case 0x6f:
6670 if ((b & 1) == 0)
6671 ot = OT_BYTE;
6672 else
6673 ot = dflag ? OT_LONG : OT_WORD;
6674 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6675 gen_op_andl_T0_ffff();
6676 gen_check_io(s, ot, pc_start - s->cs_base,
6677 svm_is_rep(prefixes) | 4);
6678 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6679 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6680 } else {
6681 gen_outs(s, ot);
6682 if (use_icount) {
6683 gen_jmp(s, s->pc - s->cs_base);
6684 }
6685 }
6686 break;
6687
6688 /************************/
6689 /* port I/O */
6690
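        /* IN/OUT: E4-E7 take an imm8 port number, EC-EF use DX.
           gen_check_io emits the IOPL/TSS bitmap permission check (and
           the SVM IOIO intercept); with icount enabled the access is
           bracketed by gen_io_start()/gen_io_end(). */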
6691 case 0xe4:
6692 case 0xe5:
6693 if ((b & 1) == 0)
6694 ot = OT_BYTE;
6695 else
6696 ot = dflag ? OT_LONG : OT_WORD;
6697 val = ldub_code(s->pc++);
6698 gen_op_movl_T0_im(val);
6699 gen_check_io(s, ot, pc_start - s->cs_base,
6700 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6701 if (use_icount)
6702 gen_io_start();
6703 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6704 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6705 gen_op_mov_reg_T1(ot, R_EAX);
6706 if (use_icount) {
6707 gen_io_end();
6708 gen_jmp(s, s->pc - s->cs_base);
6709 }
6710 break;
6711 case 0xe6:
6712 case 0xe7:
6713 if ((b & 1) == 0)
6714 ot = OT_BYTE;
6715 else
6716 ot = dflag ? OT_LONG : OT_WORD;
6717 val = ldub_code(s->pc++);
6718 gen_op_movl_T0_im(val);
6719 gen_check_io(s, ot, pc_start - s->cs_base,
6720 svm_is_rep(prefixes));
6721#ifdef VBOX /* bird: Linux writes to this port to delay I/O. */
6722 if (val == 0x80)
6723 break;
6724#endif /* VBOX */
6725 gen_op_mov_TN_reg(ot, 1, R_EAX);
6726
6727 if (use_icount)
6728 gen_io_start();
6729 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6730 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6731 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6732 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6733 if (use_icount) {
6734 gen_io_end();
6735 gen_jmp(s, s->pc - s->cs_base);
6736 }
6737 break;
6738 case 0xec:
6739 case 0xed:
6740 if ((b & 1) == 0)
6741 ot = OT_BYTE;
6742 else
6743 ot = dflag ? OT_LONG : OT_WORD;
6744 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6745 gen_op_andl_T0_ffff();
6746 gen_check_io(s, ot, pc_start - s->cs_base,
6747 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6748 if (use_icount)
6749 gen_io_start();
6750 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6751 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6752 gen_op_mov_reg_T1(ot, R_EAX);
6753 if (use_icount) {
6754 gen_io_end();
6755 gen_jmp(s, s->pc - s->cs_base);
6756 }
6757 break;
6758 case 0xee:
6759 case 0xef:
6760 if ((b & 1) == 0)
6761 ot = OT_BYTE;
6762 else
6763 ot = dflag ? OT_LONG : OT_WORD;
6764 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6765 gen_op_andl_T0_ffff();
6766 gen_check_io(s, ot, pc_start - s->cs_base,
6767 svm_is_rep(prefixes));
6768 gen_op_mov_TN_reg(ot, 1, R_EAX);
6769
6770 if (use_icount)
6771 gen_io_start();
6772 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6773 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6774 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6775 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6776 if (use_icount) {
6777 gen_io_end();
6778 gen_jmp(s, s->pc - s->cs_base);
6779 }
6780 break;
6781
6782 /************************/
6783 /* control */
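        /* Near returns pop EIP from the stack (C2 also releases an
           imm16 worth of stack); far returns use helper_lret_protected
           in protected mode and pop CS:EIP directly in real/VM86
           mode. */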
6784 case 0xc2: /* ret im */
6785 val = ldsw_code(s->pc);
6786 s->pc += 2;
6787 gen_pop_T0(s);
6788 if (CODE64(s) && s->dflag)
6789 s->dflag = 2;
6790 gen_stack_update(s, val + (2 << s->dflag));
6791 if (s->dflag == 0)
6792 gen_op_andl_T0_ffff();
6793 gen_op_jmp_T0();
6794 gen_eob(s);
6795 break;
6796 case 0xc3: /* ret */
6797 gen_pop_T0(s);
6798 gen_pop_update(s);
6799 if (s->dflag == 0)
6800 gen_op_andl_T0_ffff();
6801 gen_op_jmp_T0();
6802 gen_eob(s);
6803 break;
6804 case 0xca: /* lret im */
6805 val = ldsw_code(s->pc);
6806 s->pc += 2;
6807 do_lret:
6808 if (s->pe && !s->vm86) {
6809 if (s->cc_op != CC_OP_DYNAMIC)
6810 gen_op_set_cc_op(s->cc_op);
6811 gen_jmp_im(pc_start - s->cs_base);
6812 tcg_gen_helper_0_2(helper_lret_protected,
6813 tcg_const_i32(s->dflag),
6814 tcg_const_i32(val));
6815 } else {
6816 gen_stack_A0(s);
6817 /* pop offset */
6818 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6819 if (s->dflag == 0)
6820 gen_op_andl_T0_ffff();
6821 /* NOTE: keeping EIP updated is not a problem in case of
6822 exception */
6823 gen_op_jmp_T0();
6824 /* pop selector */
6825 gen_op_addl_A0_im(2 << s->dflag);
6826 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6827 gen_op_movl_seg_T0_vm(R_CS);
6828 /* add stack offset */
6829 gen_stack_update(s, val + (4 << s->dflag));
6830 }
6831 gen_eob(s);
6832 break;
6833 case 0xcb: /* lret */
6834 val = 0;
6835 goto do_lret;
6836 case 0xcf: /* iret */
6837 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6838 if (!s->pe) {
6839 /* real mode */
6840 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6841 s->cc_op = CC_OP_EFLAGS;
6842 } else if (s->vm86) {
6843#ifdef VBOX
6844 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6845#else
6846 if (s->iopl != 3) {
6847#endif
6848 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6849 } else {
6850 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6851 s->cc_op = CC_OP_EFLAGS;
6852 }
6853 } else {
6854 if (s->cc_op != CC_OP_DYNAMIC)
6855 gen_op_set_cc_op(s->cc_op);
6856 gen_jmp_im(pc_start - s->cs_base);
6857 tcg_gen_helper_0_2(helper_iret_protected,
6858 tcg_const_i32(s->dflag),
6859 tcg_const_i32(s->pc - s->cs_base));
6860 s->cc_op = CC_OP_EFLAGS;
6861 }
6862 gen_eob(s);
6863 break;
6864 case 0xe8: /* call im */
6865 {
6866 if (dflag)
6867 tval = (int32_t)insn_get(s, OT_LONG);
6868 else
6869 tval = (int16_t)insn_get(s, OT_WORD);
6870 next_eip = s->pc - s->cs_base;
6871 tval += next_eip;
6872 if (s->dflag == 0)
6873 tval &= 0xffff;
6874 gen_movtl_T0_im(next_eip);
6875 gen_push_T0(s);
6876 gen_jmp(s, tval);
6877 }
6878 break;
6879 case 0x9a: /* lcall im */
6880 {
6881 unsigned int selector, offset;
6882
6883 if (CODE64(s))
6884 goto illegal_op;
6885 ot = dflag ? OT_LONG : OT_WORD;
6886 offset = insn_get(s, ot);
6887 selector = insn_get(s, OT_WORD);
6888
6889 gen_op_movl_T0_im(selector);
6890 gen_op_movl_T1_imu(offset);
6891 }
6892 goto do_lcall;
6893 case 0xe9: /* jmp im */
6894 if (dflag)
6895 tval = (int32_t)insn_get(s, OT_LONG);
6896 else
6897 tval = (int16_t)insn_get(s, OT_WORD);
6898 tval += s->pc - s->cs_base;
6899 if (s->dflag == 0)
6900 tval &= 0xffff;
6901 else if(!CODE64(s))
6902 tval &= 0xffffffff;
6903 gen_jmp(s, tval);
6904 break;
6905 case 0xea: /* ljmp im */
6906 {
6907 unsigned int selector, offset;
6908
6909 if (CODE64(s))
6910 goto illegal_op;
6911 ot = dflag ? OT_LONG : OT_WORD;
6912 offset = insn_get(s, ot);
6913 selector = insn_get(s, OT_WORD);
6914
6915 gen_op_movl_T0_im(selector);
6916 gen_op_movl_T1_imu(offset);
6917 }
6918 goto do_ljmp;
6919 case 0xeb: /* jmp Jb */
6920 tval = (int8_t)insn_get(s, OT_BYTE);
6921 tval += s->pc - s->cs_base;
6922 if (s->dflag == 0)
6923 tval &= 0xffff;
6924 gen_jmp(s, tval);
6925 break;
6926 case 0x70 ... 0x7f: /* jcc Jb */
6927 tval = (int8_t)insn_get(s, OT_BYTE);
6928 goto do_jcc;
6929 case 0x180 ... 0x18f: /* jcc Jv */
6930 if (dflag) {
6931 tval = (int32_t)insn_get(s, OT_LONG);
6932 } else {
6933 tval = (int16_t)insn_get(s, OT_WORD);
6934 }
6935 do_jcc:
6936 next_eip = s->pc - s->cs_base;
6937 tval += next_eip;
6938 if (s->dflag == 0)
6939 tval &= 0xffff;
6940 gen_jcc(s, b, tval, next_eip);
6941 break;
6942
6943 case 0x190 ... 0x19f: /* setcc Gv */
6944 modrm = ldub_code(s->pc++);
6945 gen_setcc(s, b);
6946 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6947 break;
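        /* cmov: the source is always read (as on real hardware, where
           the load can fault even if the condition is false); only the
           register writeback is made conditional by branching on the
           inverted condition b ^ 1. */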
6948 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6949 {
6950 int l1;
6951 TCGv t0;
6952
6953 ot = dflag + OT_WORD;
6954 modrm = ldub_code(s->pc++);
6955 reg = ((modrm >> 3) & 7) | rex_r;
6956 mod = (modrm >> 6) & 3;
6957 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6958 if (mod != 3) {
6959 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6960 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6961 } else {
6962 rm = (modrm & 7) | REX_B(s);
6963 gen_op_mov_v_reg(ot, t0, rm);
6964 }
6965#ifdef TARGET_X86_64
6966 if (ot == OT_LONG) {
6967 /* XXX: specific Intel behaviour ? */
6968 l1 = gen_new_label();
6969 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6970 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6971 gen_set_label(l1);
6972 tcg_gen_movi_tl(cpu_tmp0, 0);
6973 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6974 } else
6975#endif
6976 {
6977 l1 = gen_new_label();
6978 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6979 gen_op_mov_reg_v(ot, reg, t0);
6980 gen_set_label(l1);
6981 }
6982 tcg_temp_free(t0);
6983 }
6984 break;
6985
6986 /************************/
6987 /* flags */
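        /* pushf/popf: the writable EFLAGS mask shrinks with privilege:
           CPL 0 may change IOPL, CPL <= IOPL may change IF, and lower
           privilege neither; 16-bit operands mask the high word. popf
           ends the TB because TF may have changed. */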
6988 case 0x9c: /* pushf */
6989 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6990#ifdef VBOX
6991 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6992#else
6993 if (s->vm86 && s->iopl != 3) {
6994#endif
6995 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6996 } else {
6997 if (s->cc_op != CC_OP_DYNAMIC)
6998 gen_op_set_cc_op(s->cc_op);
6999#ifdef VBOX
7000 if (s->vm86 && s->vme && s->iopl != 3)
7001 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
7002 else
7003#endif
7004 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
7005 gen_push_T0(s);
7006 }
7007 break;
7008 case 0x9d: /* popf */
7009 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
7010#ifdef VBOX
7011 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7012#else
7013 if (s->vm86 && s->iopl != 3) {
7014#endif
7015 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7016 } else {
7017 gen_pop_T0(s);
7018 if (s->cpl == 0) {
7019 if (s->dflag) {
7020 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7021 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7022 } else {
7023 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7024 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7025 }
7026 } else {
7027 if (s->cpl <= s->iopl) {
7028 if (s->dflag) {
7029 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7030 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7031 } else {
7032 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7033 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7034 }
7035 } else {
7036 if (s->dflag) {
7037 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7038 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7039 } else {
7040#ifdef VBOX
7041 if (s->vm86 && s->vme)
7042 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7043 else
7044#endif
7045 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7046 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7047 }
7048 }
7049 }
7050 gen_pop_update(s);
7051 s->cc_op = CC_OP_EFLAGS;
7052 /* abort translation because TF flag may change */
7053 gen_jmp_im(s->pc - s->cs_base);
7054 gen_eob(s);
7055 }
7056 break;
7057 case 0x9e: /* sahf */
7058 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7059 goto illegal_op;
7060 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7061 if (s->cc_op != CC_OP_DYNAMIC)
7062 gen_op_set_cc_op(s->cc_op);
7063 gen_compute_eflags(cpu_cc_src);
7064 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7065 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7066 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7067 s->cc_op = CC_OP_EFLAGS;
7068 break;
7069 case 0x9f: /* lahf */
7070 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7071 goto illegal_op;
7072 if (s->cc_op != CC_OP_DYNAMIC)
7073 gen_op_set_cc_op(s->cc_op);
7074 gen_compute_eflags(cpu_T[0]);
7075 /* Note: gen_compute_eflags() only gives the condition codes */
7076 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7077 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7078 break;
7079 case 0xf5: /* cmc */
7080 if (s->cc_op != CC_OP_DYNAMIC)
7081 gen_op_set_cc_op(s->cc_op);
7082 gen_compute_eflags(cpu_cc_src);
7083 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7084 s->cc_op = CC_OP_EFLAGS;
7085 break;
7086 case 0xf8: /* clc */
7087 if (s->cc_op != CC_OP_DYNAMIC)
7088 gen_op_set_cc_op(s->cc_op);
7089 gen_compute_eflags(cpu_cc_src);
7090 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7091 s->cc_op = CC_OP_EFLAGS;
7092 break;
7093 case 0xf9: /* stc */
7094 if (s->cc_op != CC_OP_DYNAMIC)
7095 gen_op_set_cc_op(s->cc_op);
7096 gen_compute_eflags(cpu_cc_src);
7097 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7098 s->cc_op = CC_OP_EFLAGS;
7099 break;
7100 case 0xfc: /* cld */
7101 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7102 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7103 break;
7104 case 0xfd: /* std */
7105 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7106 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7107 break;
7108
7109 /************************/
7110 /* bit operations */
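        /* bt/bts/btr/btc: op 0..3 = test/set/reset/complement. The
           shared bt_op tail masks the bit offset to the operand width,
           shifts the tested bit into cc_src for CF, and for op != 0
           writes back the modified operand. */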
7111 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7112 ot = dflag + OT_WORD;
7113 modrm = ldub_code(s->pc++);
7114 op = (modrm >> 3) & 7;
7115 mod = (modrm >> 6) & 3;
7116 rm = (modrm & 7) | REX_B(s);
7117 if (mod != 3) {
7118 s->rip_offset = 1;
7119 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7120 gen_op_ld_T0_A0(ot + s->mem_index);
7121 } else {
7122 gen_op_mov_TN_reg(ot, 0, rm);
7123 }
7124 /* load shift */
7125 val = ldub_code(s->pc++);
7126 gen_op_movl_T1_im(val);
7127 if (op < 4)
7128 goto illegal_op;
7129 op -= 4;
7130 goto bt_op;
7131 case 0x1a3: /* bt Gv, Ev */
7132 op = 0;
7133 goto do_btx;
7134 case 0x1ab: /* bts */
7135 op = 1;
7136 goto do_btx;
7137 case 0x1b3: /* btr */
7138 op = 2;
7139 goto do_btx;
7140 case 0x1bb: /* btc */
7141 op = 3;
7142 do_btx:
7143 ot = dflag + OT_WORD;
7144 modrm = ldub_code(s->pc++);
7145 reg = ((modrm >> 3) & 7) | rex_r;
7146 mod = (modrm >> 6) & 3;
7147 rm = (modrm & 7) | REX_B(s);
7148 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7149 if (mod != 3) {
7150 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7151                /* specific case: the bit offset may index outside the
7151                   operand, so fold the word displacement into the EA */
7152 gen_exts(ot, cpu_T[1]);
7153 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7154 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7155 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7156 gen_op_ld_T0_A0(ot + s->mem_index);
7157 } else {
7158 gen_op_mov_TN_reg(ot, 0, rm);
7159 }
7160 bt_op:
7161 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7162 switch(op) {
7163 case 0:
7164 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7165 tcg_gen_movi_tl(cpu_cc_dst, 0);
7166 break;
7167 case 1:
7168 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7169 tcg_gen_movi_tl(cpu_tmp0, 1);
7170 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7171 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7172 break;
7173 case 2:
7174 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7175 tcg_gen_movi_tl(cpu_tmp0, 1);
7176 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7177 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7178 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7179 break;
7180 default:
7181 case 3:
7182 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7183 tcg_gen_movi_tl(cpu_tmp0, 1);
7184 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7185 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7186 break;
7187 }
7188 s->cc_op = CC_OP_SARB + ot;
7189 if (op != 0) {
7190 if (mod != 3)
7191 gen_op_st_T0_A0(ot + s->mem_index);
7192 else
7193 gen_op_mov_reg_T0(ot, rm);
7194 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7195 tcg_gen_movi_tl(cpu_cc_dst, 0);
7196 }
7197 break;
7198 case 0x1bc: /* bsf */
7199 case 0x1bd: /* bsr */
7200 {
7201 int label1;
7202 TCGv t0;
7203
7204 ot = dflag + OT_WORD;
7205 modrm = ldub_code(s->pc++);
7206 reg = ((modrm >> 3) & 7) | rex_r;
7207 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7208 gen_extu(ot, cpu_T[0]);
7209 label1 = gen_new_label();
7210 tcg_gen_movi_tl(cpu_cc_dst, 0);
7211 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7212 tcg_gen_mov_tl(t0, cpu_T[0]);
7213 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7214 if (b & 1) {
7215 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7216 } else {
7217 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7218 }
7219 gen_op_mov_reg_T0(ot, reg);
7220 tcg_gen_movi_tl(cpu_cc_dst, 1);
7221 gen_set_label(label1);
7222 tcg_gen_discard_tl(cpu_cc_src);
7223 s->cc_op = CC_OP_LOGICB + ot;
7224 tcg_temp_free(t0);
7225 }
7226 break;
7227 /************************/
7228 /* bcd */
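        /* BCD adjustment instructions: invalid in 64-bit mode,
           otherwise handled entirely by helpers; aam with a zero
           immediate raises #DE. */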
7229 case 0x27: /* daa */
7230 if (CODE64(s))
7231 goto illegal_op;
7232 if (s->cc_op != CC_OP_DYNAMIC)
7233 gen_op_set_cc_op(s->cc_op);
7234 tcg_gen_helper_0_0(helper_daa);
7235 s->cc_op = CC_OP_EFLAGS;
7236 break;
7237 case 0x2f: /* das */
7238 if (CODE64(s))
7239 goto illegal_op;
7240 if (s->cc_op != CC_OP_DYNAMIC)
7241 gen_op_set_cc_op(s->cc_op);
7242 tcg_gen_helper_0_0(helper_das);
7243 s->cc_op = CC_OP_EFLAGS;
7244 break;
7245 case 0x37: /* aaa */
7246 if (CODE64(s))
7247 goto illegal_op;
7248 if (s->cc_op != CC_OP_DYNAMIC)
7249 gen_op_set_cc_op(s->cc_op);
7250 tcg_gen_helper_0_0(helper_aaa);
7251 s->cc_op = CC_OP_EFLAGS;
7252 break;
7253 case 0x3f: /* aas */
7254 if (CODE64(s))
7255 goto illegal_op;
7256 if (s->cc_op != CC_OP_DYNAMIC)
7257 gen_op_set_cc_op(s->cc_op);
7258 tcg_gen_helper_0_0(helper_aas);
7259 s->cc_op = CC_OP_EFLAGS;
7260 break;
7261 case 0xd4: /* aam */
7262 if (CODE64(s))
7263 goto illegal_op;
7264 val = ldub_code(s->pc++);
7265 if (val == 0) {
7266 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7267 } else {
7268 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7269 s->cc_op = CC_OP_LOGICB;
7270 }
7271 break;
7272 case 0xd5: /* aad */
7273 if (CODE64(s))
7274 goto illegal_op;
7275 val = ldub_code(s->pc++);
7276 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7277 s->cc_op = CC_OP_LOGICB;
7278 break;
7279 /************************/
7280 /* misc */
7281 case 0x90: /* nop */
7282 /* XXX: xchg + rex handling */
7283 /* XXX: correct lock test for all insn */
7284 if (prefixes & PREFIX_LOCK)
7285 goto illegal_op;
7286 if (prefixes & PREFIX_REPZ) {
7287 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7288 }
7289 break;
7290 case 0x9b: /* fwait */
7291 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7292 (HF_MP_MASK | HF_TS_MASK)) {
7293 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7294 } else {
7295 if (s->cc_op != CC_OP_DYNAMIC)
7296 gen_op_set_cc_op(s->cc_op);
7297 gen_jmp_im(pc_start - s->cs_base);
7298 tcg_gen_helper_0_0(helper_fwait);
7299 }
7300 break;
7301 case 0xcc: /* int3 */
7302#ifdef VBOX
7303 if (s->vm86 && s->iopl != 3 && !s->vme) {
7304 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7305 } else
7306#endif
7307 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7308 break;
7309 case 0xcd: /* int N */
7310 val = ldub_code(s->pc++);
7311#ifdef VBOX
7312 if (s->vm86 && s->iopl != 3 && !s->vme) {
7313#else
7314 if (s->vm86 && s->iopl != 3) {
7315#endif
7316 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7317 } else {
7318 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7319 }
7320 break;
7321 case 0xce: /* into */
7322 if (CODE64(s))
7323 goto illegal_op;
7324 if (s->cc_op != CC_OP_DYNAMIC)
7325 gen_op_set_cc_op(s->cc_op);
7326 gen_jmp_im(pc_start - s->cs_base);
7327 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7328 break;
7329 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7330 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7331#if 1
7332 gen_debug(s, pc_start - s->cs_base);
7333#else
7334 /* start debug */
7335 tb_flush(cpu_single_env);
7336 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7337#endif
7338 break;
7339 case 0xfa: /* cli */
7340 if (!s->vm86) {
7341 if (s->cpl <= s->iopl) {
7342 tcg_gen_helper_0_0(helper_cli);
7343 } else {
7344 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7345 }
7346 } else {
7347 if (s->iopl == 3) {
7348 tcg_gen_helper_0_0(helper_cli);
7349#ifdef VBOX
7350 } else if (s->iopl != 3 && s->vme) {
7351 tcg_gen_helper_0_0(helper_cli_vme);
7352#endif
7353 } else {
7354 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7355 }
7356 }
7357 break;
7358 case 0xfb: /* sti */
7359 if (!s->vm86) {
7360 if (s->cpl <= s->iopl) {
7361 gen_sti:
7362 tcg_gen_helper_0_0(helper_sti);
7363                /* interrupts are recognized only after the insn following sti */
7364                /* if several consecutive insns would inhibit interrupts, only
7365                   the _first_ one sets the inhibit flag */
7366 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7367 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7368 /* give a chance to handle pending irqs */
7369 gen_jmp_im(s->pc - s->cs_base);
7370 gen_eob(s);
7371 } else {
7372 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7373 }
7374 } else {
7375 if (s->iopl == 3) {
7376 goto gen_sti;
7377#ifdef VBOX
7378 } else if (s->iopl != 3 && s->vme) {
7379 tcg_gen_helper_0_0(helper_sti_vme);
7380 /* give a chance to handle pending irqs */
7381 gen_jmp_im(s->pc - s->cs_base);
7382 gen_eob(s);
7383#endif
7384 } else {
7385 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7386 }
7387 }
7388 break;
7389 case 0x62: /* bound */
7390 if (CODE64(s))
7391 goto illegal_op;
7392 ot = dflag ? OT_LONG : OT_WORD;
7393 modrm = ldub_code(s->pc++);
7394 reg = (modrm >> 3) & 7;
7395 mod = (modrm >> 6) & 3;
7396 if (mod == 3)
7397 goto illegal_op;
7398 gen_op_mov_TN_reg(ot, 0, reg);
7399 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7400 gen_jmp_im(pc_start - s->cs_base);
7401 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7402 if (ot == OT_WORD)
7403 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7404 else
7405 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7406 break;
7407 case 0x1c8 ... 0x1cf: /* bswap reg */
7408 reg = (b & 7) | REX_B(s);
7409#ifdef TARGET_X86_64
7410 if (dflag == 2) {
7411 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7412 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7413 gen_op_mov_reg_T0(OT_QUAD, reg);
7414 } else
7415 {
7416 TCGv tmp0;
7417 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7418
7419 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7420 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7421 tcg_gen_bswap_i32(tmp0, tmp0);
7422 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7423 gen_op_mov_reg_T0(OT_LONG, reg);
7424 }
7425#else
7426 {
7427 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7428 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7429 gen_op_mov_reg_T0(OT_LONG, reg);
7430 }
7431#endif
7432 break;
7433 case 0xd6: /* salc */
7434 if (CODE64(s))
7435 goto illegal_op;
7436 if (s->cc_op != CC_OP_DYNAMIC)
7437 gen_op_set_cc_op(s->cc_op);
7438 gen_compute_eflags_c(cpu_T[0]);
7439 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7440 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7441 break;
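        /* loop/loopz/loopnz/jecxz: l1 is the taken target (jump to
           tval), l3 and the fall-through path continue at next_eip,
           and l2 rejoins both before gen_eob(). ECX is decremented
           first except for jecxz. */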
7442 case 0xe0: /* loopnz */
7443 case 0xe1: /* loopz */
7444 case 0xe2: /* loop */
7445 case 0xe3: /* jecxz */
7446 {
7447 int l1, l2, l3;
7448
7449 tval = (int8_t)insn_get(s, OT_BYTE);
7450 next_eip = s->pc - s->cs_base;
7451 tval += next_eip;
7452 if (s->dflag == 0)
7453 tval &= 0xffff;
7454
7455 l1 = gen_new_label();
7456 l2 = gen_new_label();
7457 l3 = gen_new_label();
7458 b &= 3;
7459 switch(b) {
7460 case 0: /* loopnz */
7461 case 1: /* loopz */
7462 if (s->cc_op != CC_OP_DYNAMIC)
7463 gen_op_set_cc_op(s->cc_op);
7464 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7465 gen_op_jz_ecx(s->aflag, l3);
7466 gen_compute_eflags(cpu_tmp0);
7467 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7468 if (b == 0) {
7469 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7470 } else {
7471 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7472 }
7473 break;
7474 case 2: /* loop */
7475 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7476 gen_op_jnz_ecx(s->aflag, l1);
7477 break;
7478 default:
7479 case 3: /* jcxz */
7480 gen_op_jz_ecx(s->aflag, l1);
7481 break;
7482 }
7483
7484 gen_set_label(l3);
7485 gen_jmp_im(next_eip);
7486 tcg_gen_br(l2);
7487
7488 gen_set_label(l1);
7489 gen_jmp_im(tval);
7490 gen_set_label(l2);
7491 gen_eob(s);
7492 }
7493 break;
7494 case 0x130: /* wrmsr */
7495 case 0x132: /* rdmsr */
7496 if (s->cpl != 0) {
7497 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7498 } else {
7499 if (s->cc_op != CC_OP_DYNAMIC)
7500 gen_op_set_cc_op(s->cc_op);
7501 gen_jmp_im(pc_start - s->cs_base);
7502 if (b & 2) {
7503 tcg_gen_helper_0_0(helper_rdmsr);
7504 } else {
7505 tcg_gen_helper_0_0(helper_wrmsr);
7506 }
7507 }
7508 break;
7509 case 0x131: /* rdtsc */
7510 if (s->cc_op != CC_OP_DYNAMIC)
7511 gen_op_set_cc_op(s->cc_op);
7512 gen_jmp_im(pc_start - s->cs_base);
7513 if (use_icount)
7514 gen_io_start();
7515 tcg_gen_helper_0_0(helper_rdtsc);
7516 if (use_icount) {
7517 gen_io_end();
7518 gen_jmp(s, s->pc - s->cs_base);
7519 }
7520 break;
7521 case 0x133: /* rdpmc */
7522 if (s->cc_op != CC_OP_DYNAMIC)
7523 gen_op_set_cc_op(s->cc_op);
7524 gen_jmp_im(pc_start - s->cs_base);
7525 tcg_gen_helper_0_0(helper_rdpmc);
7526 break;
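        /* sysenter/sysexit: fast system calls, protected mode only.
           Intel CPUs keep them valid in 64-bit mode, which the vendor
           check in the non-VBOX branch expresses; the VBOX build
           currently rejects them in 64-bit code altogether. */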
7527 case 0x134: /* sysenter */
7528#ifndef VBOX
7529        /* On Intel CPUs SYSENTER remains valid in 64-bit mode */
7530 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7531#else
7532        /** @todo make this right: check the CPU vendor as in the non-VBOX branch */
7533 if (CODE64(s))
7534#endif
7535 goto illegal_op;
7536 if (!s->pe) {
7537 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7538 } else {
7539 if (s->cc_op != CC_OP_DYNAMIC) {
7540 gen_op_set_cc_op(s->cc_op);
7541 s->cc_op = CC_OP_DYNAMIC;
7542 }
7543 gen_jmp_im(pc_start - s->cs_base);
7544 tcg_gen_helper_0_0(helper_sysenter);
7545 gen_eob(s);
7546 }
7547 break;
7548 case 0x135: /* sysexit */
7549#ifndef VBOX
7550        /* On Intel CPUs SYSEXIT remains valid in 64-bit mode */
7551 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7552#else
7553        /** @todo make this right: check the CPU vendor as in the non-VBOX branch */
7554 if (CODE64(s))
7555#endif
7556 goto illegal_op;
7557 if (!s->pe) {
7558 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7559 } else {
7560 if (s->cc_op != CC_OP_DYNAMIC) {
7561 gen_op_set_cc_op(s->cc_op);
7562 s->cc_op = CC_OP_DYNAMIC;
7563 }
7564 gen_jmp_im(pc_start - s->cs_base);
7565 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7566 gen_eob(s);
7567 }
7568 break;
7569#ifdef TARGET_X86_64
7570 case 0x105: /* syscall */
7571 /* XXX: is it usable in real mode ? */
7572 if (s->cc_op != CC_OP_DYNAMIC) {
7573 gen_op_set_cc_op(s->cc_op);
7574 s->cc_op = CC_OP_DYNAMIC;
7575 }
7576 gen_jmp_im(pc_start - s->cs_base);
7577 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7578 gen_eob(s);
7579 break;
7580 case 0x107: /* sysret */
7581 if (!s->pe) {
7582 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7583 } else {
7584 if (s->cc_op != CC_OP_DYNAMIC) {
7585 gen_op_set_cc_op(s->cc_op);
7586 s->cc_op = CC_OP_DYNAMIC;
7587 }
7588 gen_jmp_im(pc_start - s->cs_base);
7589 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7590 /* condition codes are modified only in long mode */
7591 if (s->lma)
7592 s->cc_op = CC_OP_EFLAGS;
7593 gen_eob(s);
7594 }
7595 break;
7596#endif
7597 case 0x1a2: /* cpuid */
7598 if (s->cc_op != CC_OP_DYNAMIC)
7599 gen_op_set_cc_op(s->cc_op);
7600 gen_jmp_im(pc_start - s->cs_base);
7601 tcg_gen_helper_0_0(helper_cpuid);
7602 break;
7603 case 0xf4: /* hlt */
7604 if (s->cpl != 0) {
7605 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7606 } else {
7607 if (s->cc_op != CC_OP_DYNAMIC)
7608 gen_op_set_cc_op(s->cc_op);
7609 gen_jmp_im(pc_start - s->cs_base);
7610 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7611 s->is_jmp = 3;
7612 }
7613 break;
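        /* Group 0F 00: LDTR/TR access and segment verification, all
           protected-mode only; the loads (lldt/ltr) additionally
           require CPL 0. */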
7614 case 0x100:
7615 modrm = ldub_code(s->pc++);
7616 mod = (modrm >> 6) & 3;
7617 op = (modrm >> 3) & 7;
7618 switch(op) {
7619 case 0: /* sldt */
7620 if (!s->pe || s->vm86)
7621 goto illegal_op;
7622 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7623 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7624 ot = OT_WORD;
7625 if (mod == 3)
7626 ot += s->dflag;
7627 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7628 break;
7629 case 2: /* lldt */
7630 if (!s->pe || s->vm86)
7631 goto illegal_op;
7632 if (s->cpl != 0) {
7633 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7634 } else {
7635 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7636 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7637 gen_jmp_im(pc_start - s->cs_base);
7638 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7639 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7640 }
7641 break;
7642 case 1: /* str */
7643 if (!s->pe || s->vm86)
7644 goto illegal_op;
7645 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7646 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7647 ot = OT_WORD;
7648 if (mod == 3)
7649 ot += s->dflag;
7650 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7651 break;
7652 case 3: /* ltr */
7653 if (!s->pe || s->vm86)
7654 goto illegal_op;
7655 if (s->cpl != 0) {
7656 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7657 } else {
7658 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7659 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7660 gen_jmp_im(pc_start - s->cs_base);
7661 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7662 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7663 }
7664 break;
7665 case 4: /* verr */
7666 case 5: /* verw */
7667 if (!s->pe || s->vm86)
7668 goto illegal_op;
7669 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7670 if (s->cc_op != CC_OP_DYNAMIC)
7671 gen_op_set_cc_op(s->cc_op);
7672 if (op == 4)
7673 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7674 else
7675 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7676 s->cc_op = CC_OP_EFLAGS;
7677 break;
7678 default:
7679 goto illegal_op;
7680 }
7681 break;
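        /* Group 0F 01: descriptor-table and system instructions. The
           mod == 3 encodings are repurposed for monitor/mwait and the
           SVM instructions; the VBOX build also decodes rdtscp
           (0F 01 F9) here. */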
7682 case 0x101:
7683 modrm = ldub_code(s->pc++);
7684 mod = (modrm >> 6) & 3;
7685 op = (modrm >> 3) & 7;
7686 rm = modrm & 7;
7687
7688#ifdef VBOX
7689 /* 0f 01 f9 */
7690 if (modrm == 0xf9)
7691 {
7692 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7693 goto illegal_op;
7694 gen_jmp_im(pc_start - s->cs_base);
7695 tcg_gen_helper_0_0(helper_rdtscp);
7696 break;
7697 }
7698#endif
7699 switch(op) {
7700 case 0: /* sgdt */
7701 if (mod == 3)
7702 goto illegal_op;
7703 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7704 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7705 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7706 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7707 gen_add_A0_im(s, 2);
7708 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7709 if (!s->dflag)
7710 gen_op_andl_T0_im(0xffffff);
7711 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7712 break;
7713 case 1:
7714 if (mod == 3) {
7715 switch (rm) {
7716 case 0: /* monitor */
7717 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7718 s->cpl != 0)
7719 goto illegal_op;
7720 if (s->cc_op != CC_OP_DYNAMIC)
7721 gen_op_set_cc_op(s->cc_op);
7722 gen_jmp_im(pc_start - s->cs_base);
7723#ifdef TARGET_X86_64
7724 if (s->aflag == 2) {
7725 gen_op_movq_A0_reg(R_EAX);
7726 } else
7727#endif
7728 {
7729 gen_op_movl_A0_reg(R_EAX);
7730 if (s->aflag == 0)
7731 gen_op_andl_A0_ffff();
7732 }
7733 gen_add_A0_ds_seg(s);
7734 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7735 break;
7736 case 1: /* mwait */
7737 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7738 s->cpl != 0)
7739 goto illegal_op;
7740 if (s->cc_op != CC_OP_DYNAMIC) {
7741 gen_op_set_cc_op(s->cc_op);
7742 s->cc_op = CC_OP_DYNAMIC;
7743 }
7744 gen_jmp_im(pc_start - s->cs_base);
7745 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7746 gen_eob(s);
7747 break;
7748 default:
7749 goto illegal_op;
7750 }
7751 } else { /* sidt */
7752 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7753 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7754 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7755 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7756 gen_add_A0_im(s, 2);
7757 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7758 if (!s->dflag)
7759 gen_op_andl_T0_im(0xffffff);
7760 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7761 }
7762 break;
7763 case 2: /* lgdt */
7764 case 3: /* lidt */
7765 if (mod == 3) {
7766 if (s->cc_op != CC_OP_DYNAMIC)
7767 gen_op_set_cc_op(s->cc_op);
7768 gen_jmp_im(pc_start - s->cs_base);
7769 switch(rm) {
7770 case 0: /* VMRUN */
7771 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7772 goto illegal_op;
7773 if (s->cpl != 0) {
7774 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7775 break;
7776 } else {
7777 tcg_gen_helper_0_2(helper_vmrun,
7778 tcg_const_i32(s->aflag),
7779 tcg_const_i32(s->pc - pc_start));
7780 tcg_gen_exit_tb(0);
7781 s->is_jmp = 3;
7782 }
7783 break;
7784 case 1: /* VMMCALL */
7785 if (!(s->flags & HF_SVME_MASK))
7786 goto illegal_op;
7787 tcg_gen_helper_0_0(helper_vmmcall);
7788 break;
7789 case 2: /* VMLOAD */
7790 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7791 goto illegal_op;
7792 if (s->cpl != 0) {
7793 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7794 break;
7795 } else {
7796 tcg_gen_helper_0_1(helper_vmload,
7797 tcg_const_i32(s->aflag));
7798 }
7799 break;
7800 case 3: /* VMSAVE */
7801 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7802 goto illegal_op;
7803 if (s->cpl != 0) {
7804 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7805 break;
7806 } else {
7807 tcg_gen_helper_0_1(helper_vmsave,
7808 tcg_const_i32(s->aflag));
7809 }
7810 break;
7811 case 4: /* STGI */
7812 if ((!(s->flags & HF_SVME_MASK) &&
7813 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7814 !s->pe)
7815 goto illegal_op;
7816 if (s->cpl != 0) {
7817 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7818 break;
7819 } else {
7820 tcg_gen_helper_0_0(helper_stgi);
7821 }
7822 break;
7823 case 5: /* CLGI */
7824 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7825 goto illegal_op;
7826 if (s->cpl != 0) {
7827 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7828 break;
7829 } else {
7830 tcg_gen_helper_0_0(helper_clgi);
7831 }
7832 break;
7833 case 6: /* SKINIT */
7834 if ((!(s->flags & HF_SVME_MASK) &&
7835 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7836 !s->pe)
7837 goto illegal_op;
7838 tcg_gen_helper_0_0(helper_skinit);
7839 break;
7840 case 7: /* INVLPGA */
7841 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7842 goto illegal_op;
7843 if (s->cpl != 0) {
7844 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7845 break;
7846 } else {
7847 tcg_gen_helper_0_1(helper_invlpga,
7848 tcg_const_i32(s->aflag));
7849 }
7850 break;
7851 default:
7852 goto illegal_op;
7853 }
7854 } else if (s->cpl != 0) {
7855 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7856 } else {
7857 gen_svm_check_intercept(s, pc_start,
7858 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7859 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7860 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7861 gen_add_A0_im(s, 2);
7862 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7863 if (!s->dflag)
7864 gen_op_andl_T0_im(0xffffff);
7865 if (op == 2) {
7866 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7867 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7868 } else {
7869 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7870 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7871 }
7872 }
7873 break;
7874 case 4: /* smsw */
7875 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7876 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7877 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7878 break;
7879 case 6: /* lmsw */
7880 if (s->cpl != 0) {
7881 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7882 } else {
7883 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7884 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7885 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7886 gen_jmp_im(s->pc - s->cs_base);
7887 gen_eob(s);
7888 }
7889 break;
7890 case 7: /* invlpg */
7891 if (s->cpl != 0) {
7892 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7893 } else {
7894 if (mod == 3) {
7895#ifdef TARGET_X86_64
7896 if (CODE64(s) && rm == 0) {
7897 /* swapgs */
7898 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7899 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7900 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7901 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7902 } else
7903#endif
7904 {
7905 goto illegal_op;
7906 }
7907 } else {
7908 if (s->cc_op != CC_OP_DYNAMIC)
7909 gen_op_set_cc_op(s->cc_op);
7910 gen_jmp_im(pc_start - s->cs_base);
7911 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7912 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7913 gen_jmp_im(s->pc - s->cs_base);
7914 gen_eob(s);
7915 }
7916 }
7917 break;
7918 default:
7919 goto illegal_op;
7920 }
7921 break;
7922 case 0x108: /* invd */
7923 case 0x109: /* wbinvd */
7924 if (s->cpl != 0) {
7925 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7926 } else {
7927 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7928 /* nothing to do */
7929 }
7930 break;
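        /* 0x63 is arpl in legacy/compatibility mode but movslS
           (sign-extending move) in 64-bit mode; the arpl path adjusts
           the RPL bits and reports the adjustment in ZF. */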
7931 case 0x63: /* arpl or movslS (x86_64) */
7932#ifdef TARGET_X86_64
7933 if (CODE64(s)) {
7934 int d_ot;
7935 /* d_ot is the size of destination */
7936 d_ot = dflag + OT_WORD;
7937
7938 modrm = ldub_code(s->pc++);
7939 reg = ((modrm >> 3) & 7) | rex_r;
7940 mod = (modrm >> 6) & 3;
7941 rm = (modrm & 7) | REX_B(s);
7942
7943 if (mod == 3) {
7944 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7945 /* sign extend */
7946 if (d_ot == OT_QUAD)
7947 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7948 gen_op_mov_reg_T0(d_ot, reg);
7949 } else {
7950 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7951 if (d_ot == OT_QUAD) {
7952 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7953 } else {
7954 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7955 }
7956 gen_op_mov_reg_T0(d_ot, reg);
7957 }
7958 } else
7959#endif
7960 {
7961 int label1;
7962 TCGv t0, t1, t2, a0;
7963
7964 if (!s->pe || s->vm86)
7965 goto illegal_op;
7966
7967 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7968 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7969 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7970#ifdef VBOX
7971 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7972#endif
7973 ot = OT_WORD;
7974 modrm = ldub_code(s->pc++);
7975 reg = (modrm >> 3) & 7;
7976 mod = (modrm >> 6) & 3;
7977 rm = modrm & 7;
7978 if (mod != 3) {
7979 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7980#ifdef VBOX
7981 tcg_gen_mov_tl(a0, cpu_A0);
7982#endif
7983 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7984 } else {
7985 gen_op_mov_v_reg(ot, t0, rm);
7986 }
7987 gen_op_mov_v_reg(ot, t1, reg);
7988 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7989 tcg_gen_andi_tl(t1, t1, 3);
7990 tcg_gen_movi_tl(t2, 0);
7991 label1 = gen_new_label();
7992 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7993 tcg_gen_andi_tl(t0, t0, ~3);
7994 tcg_gen_or_tl(t0, t0, t1);
7995 tcg_gen_movi_tl(t2, CC_Z);
7996 gen_set_label(label1);
7997 if (mod != 3) {
7998#ifdef VBOX
7999 /* cpu_A0 doesn't survive branch */
8000 gen_op_st_v(ot + s->mem_index, t0, a0);
8001#else
8002 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
8003#endif
8004 } else {
8005 gen_op_mov_reg_v(ot, rm, t0);
8006 }
8007 if (s->cc_op != CC_OP_DYNAMIC)
8008 gen_op_set_cc_op(s->cc_op);
8009 gen_compute_eflags(cpu_cc_src);
8010 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
8011 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
8012 s->cc_op = CC_OP_EFLAGS;
8013 tcg_temp_free(t0);
8014 tcg_temp_free(t1);
8015 tcg_temp_free(t2);
8016#ifdef VBOX
8017 tcg_temp_free(a0);
8018#endif
8019 }
8020 break;
8021 case 0x102: /* lar */
8022 case 0x103: /* lsl */
8023 {
8024 int label1;
8025 TCGv t0;
8026 if (!s->pe || s->vm86)
8027 goto illegal_op;
8028 ot = dflag ? OT_LONG : OT_WORD;
8029 modrm = ldub_code(s->pc++);
8030 reg = ((modrm >> 3) & 7) | rex_r;
8031 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8032 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8033 if (s->cc_op != CC_OP_DYNAMIC)
8034 gen_op_set_cc_op(s->cc_op);
8035 if (b == 0x102)
8036 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8037 else
8038 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8039 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8040 label1 = gen_new_label();
8041 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8042 gen_op_mov_reg_v(ot, reg, t0);
8043 gen_set_label(label1);
8044 s->cc_op = CC_OP_EFLAGS;
8045 tcg_temp_free(t0);
8046 }
8047 break;
8048 case 0x118:
8049 modrm = ldub_code(s->pc++);
8050 mod = (modrm >> 6) & 3;
8051 op = (modrm >> 3) & 7;
8052 switch(op) {
8053 case 0: /* prefetchnta */
8054 case 1: /* prefetcht0 */
8055 case 2: /* prefetcht1 */
8056 case 3: /* prefetcht2 */
8057 if (mod == 3)
8058 goto illegal_op;
8059 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8060 /* nothing more to do */
8061 break;
8062 default: /* nop (multi byte) */
8063 gen_nop_modrm(s, modrm);
8064 break;
8065 }
8066 break;
8067 case 0x119 ... 0x11f: /* nop (multi byte) */
8068 modrm = ldub_code(s->pc++);
8069 gen_nop_modrm(s, modrm);
8070 break;
8071 case 0x120: /* mov reg, crN */
8072 case 0x122: /* mov crN, reg */
8073 if (s->cpl != 0) {
8074 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8075 } else {
8076 modrm = ldub_code(s->pc++);
8077 if ((modrm & 0xc0) != 0xc0)
8078 goto illegal_op;
8079 rm = (modrm & 7) | REX_B(s);
8080 reg = ((modrm >> 3) & 7) | rex_r;
8081 if (CODE64(s))
8082 ot = OT_QUAD;
8083 else
8084 ot = OT_LONG;
8085 switch(reg) {
8086 case 0:
8087 case 2:
8088 case 3:
8089 case 4:
8090 case 8:
8091 if (s->cc_op != CC_OP_DYNAMIC)
8092 gen_op_set_cc_op(s->cc_op);
8093 gen_jmp_im(pc_start - s->cs_base);
8094 if (b & 2) {
8095 gen_op_mov_TN_reg(ot, 0, rm);
8096 tcg_gen_helper_0_2(helper_write_crN,
8097 tcg_const_i32(reg), cpu_T[0]);
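 /* writing a control register can change static translation state (paging, flags), so end the TB */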
8098 gen_jmp_im(s->pc - s->cs_base);
8099 gen_eob(s);
8100 } else {
8101 tcg_gen_helper_1_1(helper_read_crN,
8102 cpu_T[0], tcg_const_i32(reg));
8103 gen_op_mov_reg_T0(ot, rm);
8104 }
8105 break;
8106 default:
8107 goto illegal_op;
8108 }
8109 }
8110 break;
8111 case 0x121: /* mov reg, drN */
8112 case 0x123: /* mov drN, reg */
8113 if (s->cpl != 0) {
8114 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8115 } else {
8116 modrm = ldub_code(s->pc++);
8117 if ((modrm & 0xc0) != 0xc0)
8118 goto illegal_op;
8119 rm = (modrm & 7) | REX_B(s);
8120 reg = ((modrm >> 3) & 7) | rex_r;
8121 if (CODE64(s))
8122 ot = OT_QUAD;
8123 else
8124 ot = OT_LONG;
8125 /* XXX: do it dynamically with CR4.DE bit */
8126 if (reg == 4 || reg == 5 || reg >= 8)
8127 goto illegal_op;
8128 if (b & 2) {
8129 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8130 gen_op_mov_TN_reg(ot, 0, rm);
8131 tcg_gen_helper_0_2(helper_movl_drN_T0,
8132 tcg_const_i32(reg), cpu_T[0]);
8133 gen_jmp_im(s->pc - s->cs_base);
8134 gen_eob(s);
8135 } else {
8136 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8137 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8138 gen_op_mov_reg_T0(ot, rm);
8139 }
8140 }
8141 break;
8142 case 0x106: /* clts */
8143 if (s->cpl != 0) {
8144 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8145 } else {
8146 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8147 tcg_gen_helper_0_0(helper_clts);
8148 /* abort block because static cpu state changed */
8149 gen_jmp_im(s->pc - s->cs_base);
8150 gen_eob(s);
8151 }
8152 break;
8153 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8154 case 0x1c3: /* MOVNTI reg, mem */
8155 if (!(s->cpuid_features & CPUID_SSE2))
8156 goto illegal_op;
8157 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8158 modrm = ldub_code(s->pc++);
8159 mod = (modrm >> 6) & 3;
8160 if (mod == 3)
8161 goto illegal_op;
8162 reg = ((modrm >> 3) & 7) | rex_r;
8163 /* generate a generic store */
8164 gen_ldst_modrm(s, modrm, ot, reg, 1);
8165 break;
8166 case 0x1ae:
8167 modrm = ldub_code(s->pc++);
8168 mod = (modrm >> 6) & 3;
8169 op = (modrm >> 3) & 7;
8170 switch(op) {
8171 case 0: /* fxsave */
8172 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8173 (s->flags & HF_EM_MASK))
8174 goto illegal_op;
8175 if (s->flags & HF_TS_MASK) {
8176 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8177 break;
8178 }
8179 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8180 if (s->cc_op != CC_OP_DYNAMIC)
8181 gen_op_set_cc_op(s->cc_op);
8182 gen_jmp_im(pc_start - s->cs_base);
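 /* the constant argument tells the helper whether to use the 64-bit (REX.W) fxsave image format */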
8183 tcg_gen_helper_0_2(helper_fxsave,
8184 cpu_A0, tcg_const_i32((s->dflag == 2)));
8185 break;
8186 case 1: /* fxrstor */
8187 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8188 (s->flags & HF_EM_MASK))
8189 goto illegal_op;
8190 if (s->flags & HF_TS_MASK) {
8191 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8192 break;
8193 }
8194 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8195 if (s->cc_op != CC_OP_DYNAMIC)
8196 gen_op_set_cc_op(s->cc_op);
8197 gen_jmp_im(pc_start - s->cs_base);
8198 tcg_gen_helper_0_2(helper_fxrstor,
8199 cpu_A0, tcg_const_i32((s->dflag == 2)));
8200 break;
8201 case 2: /* ldmxcsr */
8202 case 3: /* stmxcsr */
8203 if (s->flags & HF_TS_MASK) {
8204 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8205 break;
8206 }
8207 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8208 mod == 3)
8209 goto illegal_op;
8210 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8211 if (op == 2) {
8212 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8213 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8214 } else {
8215 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8216 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8217 }
8218 break;
8219 case 5: /* lfence */
8220 case 6: /* mfence */
8221 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8222 goto illegal_op;
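 /* no code needs to be generated: TCG performs memory accesses in program order, so the fences are no-ops here */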
8223 break;
8224 case 7: /* sfence / clflush */
8225 if ((modrm & 0xc7) == 0xc0) {
8226 /* sfence */
8227 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8228 if (!(s->cpuid_features & CPUID_SSE))
8229 goto illegal_op;
8230 } else {
8231 /* clflush */
8232 if (!(s->cpuid_features & CPUID_CLFLUSH))
8233 goto illegal_op;
8234 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8235 }
8236 break;
8237 default:
8238 goto illegal_op;
8239 }
8240 break;
8241 case 0x10d: /* 3DNow! prefetch(w) */
8242 modrm = ldub_code(s->pc++);
8243 mod = (modrm >> 6) & 3;
8244 if (mod == 3)
8245 goto illegal_op;
8246 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8247 /* ignore for now */
8248 break;
8249 case 0x1aa: /* rsm */
8250 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8251 if (!(s->flags & HF_SMM_MASK))
8252 goto illegal_op;
8253 if (s->cc_op != CC_OP_DYNAMIC) {
8254 gen_op_set_cc_op(s->cc_op);
8255 s->cc_op = CC_OP_DYNAMIC;
8256 }
8257 gen_jmp_im(s->pc - s->cs_base);
8258 tcg_gen_helper_0_0(helper_rsm);
8259 gen_eob(s);
8260 break;
8261 case 0x1b8: /* SSE4.2 popcnt */
8262 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8263 PREFIX_REPZ)
8264 goto illegal_op;
8265 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8266 goto illegal_op;
8267
8268 modrm = ldub_code(s->pc++);
8269 reg = ((modrm >> 3) & 7);
8270
8271 if (s->prefix & PREFIX_DATA)
8272 ot = OT_WORD;
8273 else if (s->dflag != 2)
8274 ot = OT_LONG;
8275 else
8276 ot = OT_QUAD;
8277
8278 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8279 tcg_gen_helper_1_2(helper_popcnt,
8280 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8281 gen_op_mov_reg_T0(ot, reg);
8282
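 /* CC_OP_EFLAGS below relies on helper_popcnt leaving the new flags in cc_src (architecturally: ZF set iff the source is 0, the other flags cleared) */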
8283 s->cc_op = CC_OP_EFLAGS;
8284 break;
8285 case 0x10e ... 0x10f:
8286 /* 3DNow! instructions, ignore prefixes */
8287 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
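 /* fall through */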
8288 case 0x110 ... 0x117:
8289 case 0x128 ... 0x12f:
8290 case 0x138 ... 0x13a:
8291 case 0x150 ... 0x177:
8292 case 0x17c ... 0x17f:
8293 case 0x1c2:
8294 case 0x1c4 ... 0x1c6:
8295 case 0x1d0 ... 0x1fe:
8296 gen_sse(s, b, pc_start, rex_r);
8297 break;
8298 default:
8299 goto illegal_op;
8300 }
8301 /* lock generation */
8302 if (s->prefix & PREFIX_LOCK)
8303 tcg_gen_helper_0_0(helper_unlock);
8304 return s->pc;
8305 illegal_op:
8306 if (s->prefix & PREFIX_LOCK)
8307 tcg_gen_helper_0_0(helper_unlock);
8308 /* XXX: ensure that no lock was generated */
8309 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8310 return s->pc;
8311}
8312
8313void optimize_flags_init(void)
8314{
8315#if TCG_TARGET_REG_BITS == 32
8316 assert(sizeof(CCTable) == (1 << 3));
8317#else
8318 assert(sizeof(CCTable) == (1 << 4));
8319#endif
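 /* presumably the generated code indexes cc_table by shifting cc_op, so the entry size must stay this power of two */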
8320 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8321 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8322 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8323 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8324 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8325 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8326 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8327 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8328 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8329
8330 /* register helpers */
8331
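/* redefining DEF_HELPER and re-including helper.h registers every helper with TCG by name */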
8332#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8333#include "helper.h"
8334}
8335
8336/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8337 basic block 'tb'. If search_pc is TRUE, also generate PC
8338 information for each intermediate instruction. */
8339#ifndef VBOX
8340static inline void gen_intermediate_code_internal(CPUState *env,
8341#else /* VBOX */
8342DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8343#endif /* VBOX */
8344 TranslationBlock *tb,
8345 int search_pc)
8346{
8347 DisasContext dc1, *dc = &dc1;
8348 target_ulong pc_ptr;
8349 uint16_t *gen_opc_end;
8350 int j, lj, cflags;
8351 uint64_t flags;
8352 target_ulong pc_start;
8353 target_ulong cs_base;
8354 int num_insns;
8355 int max_insns;
8356
8357 /* generate intermediate code */
8358 pc_start = tb->pc;
8359 cs_base = tb->cs_base;
8360 flags = tb->flags;
8361 cflags = tb->cflags;
8362
8363 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8364 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8365 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8366 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8367 dc->f_st = 0;
8368 dc->vm86 = (flags >> VM_SHIFT) & 1;
8369#ifdef VBOX
8370 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8371 dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
8372#ifdef VBOX_WITH_CALL_RECORD
8373 if ( !(env->state & CPU_RAW_RING0)
8374 && (env->cr[0] & CR0_PG_MASK)
8375 && !(env->eflags & X86_EFL_IF)
8376 && dc->code32)
8377 dc->record_call = 1;
8378 else
8379 dc->record_call = 0;
8380#endif
8381#endif
8382 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8383 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8384 dc->tf = (flags >> TF_SHIFT) & 1;
8385 dc->singlestep_enabled = env->singlestep_enabled;
8386 dc->cc_op = CC_OP_DYNAMIC;
8387 dc->cs_base = cs_base;
8388 dc->tb = tb;
8389 dc->popl_esp_hack = 0;
8390 /* select memory access functions */
8391 dc->mem_index = 0;
8392 if (flags & HF_SOFTMMU_MASK) {
8393 if (dc->cpl == 3)
8394 dc->mem_index = 2 * 4;
8395 else
8396 dc->mem_index = 1 * 4;
8397 }
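 /* the factor of 4 presumably steps over the four access-size entries per MMU mode: 0 = raw, 4 = kernel, 8 = user */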
8398 dc->cpuid_features = env->cpuid_features;
8399 dc->cpuid_ext_features = env->cpuid_ext_features;
8400 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8401 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8402#ifdef TARGET_X86_64
8403 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8404 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8405#endif
8406 dc->flags = flags;
8407 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8408 (flags & HF_INHIBIT_IRQ_MASK)
8409#ifndef CONFIG_SOFTMMU
8410 || (flags & HF_SOFTMMU_MASK)
8411#endif
8412 );
8413#if 0
8414 /* check addseg logic */
8415 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8416 printf("ERROR addseg\n");
8417#endif
8418
8419 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8420 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8421 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8422 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8423
8424 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8425 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8426 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8427 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8428 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8429 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8430 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8431 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8432 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8433
8434 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8435
8436 dc->is_jmp = DISAS_NEXT;
8437 pc_ptr = pc_start;
8438 lj = -1;
8439 num_insns = 0;
8440 max_insns = tb->cflags & CF_COUNT_MASK;
8441 if (max_insns == 0)
8442 max_insns = CF_COUNT_MASK;
8443
8444 gen_icount_start();
8445 for(;;) {
8446 if (env->nb_breakpoints > 0) {
8447 for(j = 0; j < env->nb_breakpoints; j++) {
8448 if (env->breakpoints[j] == pc_ptr) {
8449 gen_debug(dc, pc_ptr - dc->cs_base);
8450 break;
8451 }
8452 }
8453 }
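 /* when search_pc is set, record pc, cc_op and icount for each op so the faulting guest instruction can be recovered later */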
8454 if (search_pc) {
8455 j = gen_opc_ptr - gen_opc_buf;
8456 if (lj < j) {
8457 lj++;
8458 while (lj < j)
8459 gen_opc_instr_start[lj++] = 0;
8460 }
8461 gen_opc_pc[lj] = pc_ptr;
8462 gen_opc_cc_op[lj] = dc->cc_op;
8463 gen_opc_instr_start[lj] = 1;
8464 gen_opc_icount[lj] = num_insns;
8465 }
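 /* with icount, the last instruction of the TB may perform I/O and must be bracketed by gen_io_start/gen_io_end */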
8466 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8467 gen_io_start();
8468
8469 pc_ptr = disas_insn(dc, pc_ptr);
8470 num_insns++;
8471 /* stop translation if indicated */
8472 if (dc->is_jmp)
8473 break;
8474#ifdef VBOX
8475#ifdef DEBUG
8476/*
8477 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8478 {
8479 //should never happen as the jump to the patch code terminates the translation block
8480 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8481 }
8482*/
8483#endif
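 /* VBox can request that only a single instruction be emulated: clear the request and end the TB right after this insn */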
8484 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8485 {
8486 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8487 gen_jmp_im(pc_ptr - dc->cs_base);
8488 gen_eob(dc);
8489 break;
8490 }
8491#endif /* VBOX */
8492
8493 /* in single-step mode, we generate only one instruction and
8494 then generate an exception */
8495 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8496 the flag and abort the translation to give the irqs a
8497 chance to happen */
8498 if (dc->tf || dc->singlestep_enabled ||
8499 (flags & HF_INHIBIT_IRQ_MASK)) {
8500 gen_jmp_im(pc_ptr - dc->cs_base);
8501 gen_eob(dc);
8502 break;
8503 }
8504 /* if the translation is getting too long, stop generating code too */
8505 if (gen_opc_ptr >= gen_opc_end ||
8506 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8507 num_insns >= max_insns) {
8508 gen_jmp_im(pc_ptr - dc->cs_base);
8509 gen_eob(dc);
8510 break;
8511 }
8512 }
8513 if (tb->cflags & CF_LAST_IO)
8514 gen_io_end();
8515 gen_icount_end(tb, num_insns);
8516 *gen_opc_ptr = INDEX_op_end;
8517 /* don't forget to fill in the last values */
8518 if (search_pc) {
8519 j = gen_opc_ptr - gen_opc_buf;
8520 lj++;
8521 while (lj <= j)
8522 gen_opc_instr_start[lj++] = 0;
8523 }
8524
8525#ifdef DEBUG_DISAS
8526 if (loglevel & CPU_LOG_TB_CPU) {
8527 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8528 }
8529 if (loglevel & CPU_LOG_TB_IN_ASM) {
8530 int disas_flags;
8531 fprintf(logfile, "----------------\n");
8532 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8533#ifdef TARGET_X86_64
8534 if (dc->code64)
8535 disas_flags = 2;
8536 else
8537#endif
8538 disas_flags = !dc->code32;
8539 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8540 fprintf(logfile, "\n");
8541 }
8542#endif
8543
8544 if (!search_pc) {
8545 tb->size = pc_ptr - pc_start;
8546 tb->icount = num_insns;
8547 }
8548}
8549
8550void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8551{
8552 gen_intermediate_code_internal(env, tb, 0);
8553}
8554
8555void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8556{
8557 gen_intermediate_code_internal(env, tb, 1);
8558}
8559
8560void gen_pc_load(CPUState *env, TranslationBlock *tb,
8561 unsigned long searched_pc, int pc_pos, void *puc)
8562{
8563 int cc_op;
8564#ifdef DEBUG_DISAS
8565 if (loglevel & CPU_LOG_TB_OP) {
8566 int i;
8567 fprintf(logfile, "RESTORE:\n");
8568 for(i = 0;i <= pc_pos; i++) {
8569 if (gen_opc_instr_start[i]) {
8570 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8571 }
8572 }
8573 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8574 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8575 (uint32_t)tb->cs_base);
8576 }
8577#endif
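 /* restore eip from the recorded opcode PC and, when statically known, cc_op at the searched position */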
8578 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8579 cc_op = gen_opc_cc_op[pc_pos];
8580 if (cc_op != CC_OP_DYNAMIC)
8581 env->cc_op = cc_op;
8582}