VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 88

Last change on this file since revision 88 was revision 1, checked in by vboxsync (original timestamp not preserved by the viewer).

import

  • 屬性 svn:eol-style 設為 native
檔案大小: 195.7 KB
 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#include <signal.h>
26#include <assert.h>
27
28#include "cpu.h"
29#include "exec-all.h"
30#include "disas.h"
31
/* XXX: move that elsewhere */
/* Write cursors into the micro-op output buffers: gen_opc_ptr receives the
   micro-op indices, gen_opparam_ptr their operands (see gen-op.h emitters). */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;
35
/* Instruction-prefix bits accumulated into DisasContext.prefix while decoding. */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

/* Helpers that compile away on 32-bit targets so shared decode code can
   mention 64-bit-only state without #ifdefs at every use site. */
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
/* Non-zero when a REX prefix is active, i.e. byte regs 4-7 select
   SPL/BPL/SIL/DIL rather than AH/CH/DH/BH (read by the _wrapper ops). */
static int x86_64_hregs;
#endif

/* With direct jumps the TB pointer is patched in, so no parameter is emitted. */
#ifdef USE_DIRECT_JUMP
#define TBPARAM(x)
#else
#define TBPARAM(x) (long)(x)
#endif
69
70#ifdef VBOX
71/* Special/override code readers to hide patched code. */
72
73uint8_t ldub_code_raw(target_ulong pc)
74{
75 uint8_t b;
76
77 if (!remR3GetOpcode(cpu_single_env, pc, &b))
78 b = ldub_code(pc);
79 return b;
80}
81#define ldub_code(a) ldub_code_raw(a)
82
83uint16_t lduw_code_raw(target_ulong pc)
84{
85 return (ldub_code(pc+1) << 8) | ldub_code(pc);
86}
87#define lduw_code(a) lduw_code_raw(a)
88
89
90uint32_t ldl_code_raw(target_ulong pc)
91{
92 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
93}
94#define ldl_code(a) ldl_code_raw(a)
95
96#endif /* VBOX */
97
98
/* Per-instruction/per-TB state of the x86 front end while translating one
   basic block into micro-ops. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* accumulated PREFIX_* bits of the current insn */
    int aflag, dflag; /* address/operand size: 0=16, 1=32 (2=64 under TARGET_X86_64) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B bits of the current insn */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level (0-3) */
    int iopl;   /* I/O privilege level from EFLAGS */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (raw/kernel/user row) */
    int flags;  /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
} DisasContext;
134
135static void gen_eob(DisasContext *s);
136static void gen_jmp(DisasContext *s, target_ulong eip);
137static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
138
139/* i386 arith/logic operations */
/* i386 arith/logic operations */
/* Order matches the /r field encoding of group-1 ALU instructions. */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
150
151/* i386 shift ops */
/* i386 shift ops */
/* Order matches the /r field encoding of the shift/rotate group. */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented: encoding 6 behaves like SHL */
    OP_SAR = 7,
};
162
/* Build INDEX_op_* constants for every micro-op listed in opc.h;
   NB_OPS is the total count. */
enum {
#define DEF(s, n, copy_size) INDEX_op_ ## s,
#include "opc.h"
#undef DEF
    NB_OPS,
};
169
170#include "gen-op.h"
171
172/* operand size */
/* operand size */
/* Used as the first index of most micro-op dispatch tables below. */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
179
/* Operand identifiers used by the decoder: real registers first (matching
   the hardware ModRM encoding), then pseudo-registers for temporaries. */
enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
195
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

/* Expand one table row: a comma-separated list of per-register micro-ops,
   in CPU_NB_REGS order. */
#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,\
    prefix ## R8 ## suffix,\
    prefix ## R9 ## suffix,\
    prefix ## R10 ## suffix,\
    prefix ## R11 ## suffix,\
    prefix ## R12 ## suffix,\
    prefix ## R13 ## suffix,\
    prefix ## R14 ## suffix,\
    prefix ## R15 ## suffix,

/* Byte-register wrappers for encodings 4-7: with a REX prefix
   (x86_64_hregs set) they are SPL/BPL/SIL/DIL, otherwise the legacy
   high-byte registers AH/CH/DH/BH (the prefixh "movh" ops on regs 0-3). */
#define DEF_BREGS(prefixb, prefixh, suffix) \
 \
static void prefixb ## ESP ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## ESP ## suffix (); \
    else \
        prefixh ## EAX ## suffix (); \
} \
 \
static void prefixb ## EBP ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## EBP ## suffix (); \
    else \
        prefixh ## ECX ## suffix (); \
} \
 \
static void prefixb ## ESI ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## ESI ## suffix (); \
    else \
        prefixh ## EDX ## suffix (); \
} \
 \
static void prefixb ## EDI ## suffix ## _wrapper(void) \
{ \
    if (x86_64_hregs) \
        prefixb ## EDI ## suffix (); \
    else \
        prefixh ## EBX ## suffix (); \
}

DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

/* 32-bit target: only the eight legacy registers. */
#define DEF_REGS(prefix, suffix) \
    prefix ## EAX ## suffix,\
    prefix ## ECX ## suffix,\
    prefix ## EDX ## suffix,\
    prefix ## EBX ## suffix,\
    prefix ## ESP ## suffix,\
    prefix ## EBP ## suffix,\
    prefix ## ESI ## suffix,\
    prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
272
/* Store T0 into a general register, indexed [operand size][reg].
   OT_BYTE slots 4-7 use the REX-aware wrappers (SPL..DIL vs AH..BH). */
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T0,
        gen_op_movb_ECX_T0,
        gen_op_movb_EDX_T0,
        gen_op_movb_EBX_T0,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T0_wrapper,
        gen_op_movb_EBP_T0_wrapper,
        gen_op_movb_ESI_T0_wrapper,
        gen_op_movb_EDI_T0_wrapper,
        gen_op_movb_R8_T0,
        gen_op_movb_R9_T0,
        gen_op_movb_R10_T0,
        gen_op_movb_R11_T0,
        gen_op_movb_R12_T0,
        gen_op_movb_R13_T0,
        gen_op_movb_R14_T0,
        gen_op_movb_R15_T0,
#else
        gen_op_movh_EAX_T0,
        gen_op_movh_ECX_T0,
        gen_op_movh_EDX_T0,
        gen_op_movh_EBX_T0,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T0)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T0)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T0)
    },
#endif
};
311
/* Store T1 into a general register; same layout as gen_op_mov_reg_T0. */
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T1,
        gen_op_movb_ECX_T1,
        gen_op_movb_EDX_T1,
        gen_op_movb_EBX_T1,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T1_wrapper,
        gen_op_movb_EBP_T1_wrapper,
        gen_op_movb_ESI_T1_wrapper,
        gen_op_movb_EDI_T1_wrapper,
        gen_op_movb_R8_T1,
        gen_op_movb_R9_T1,
        gen_op_movb_R10_T1,
        gen_op_movb_R11_T1,
        gen_op_movb_R12_T1,
        gen_op_movb_R13_T1,
        gen_op_movb_R14_T1,
        gen_op_movb_R15_T1,
#else
        gen_op_movh_EAX_T1,
        gen_op_movh_ECX_T1,
        gen_op_movh_EDX_T1,
        gen_op_movh_EBX_T1,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T1)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T1)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T1)
    },
#endif
};
350
/* Store the address temp A0 into a register; no byte row, so the first
   index is (ot - OT_WORD): 0=word, 1=long, 2=quad. */
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_movw_, _A0)
    },
    [1] = {
        DEF_REGS(gen_op_movl_, _A0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_movq_, _A0)
    },
#endif
};
364
/* Load a register into T0/T1, indexed [operand size][temp index][reg].
   OT_WORD/OT_LONG/OT_QUAD all use the movl ops, which copy the full
   target_ulong register value (the consumer masks to size as needed). */
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
{
    [OT_BYTE] = {
        {
            gen_op_movl_T0_EAX,
            gen_op_movl_T0_ECX,
            gen_op_movl_T0_EDX,
            gen_op_movl_T0_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T0_ESP_wrapper,
            gen_op_movl_T0_EBP_wrapper,
            gen_op_movl_T0_ESI_wrapper,
            gen_op_movl_T0_EDI_wrapper,
            gen_op_movl_T0_R8,
            gen_op_movl_T0_R9,
            gen_op_movl_T0_R10,
            gen_op_movl_T0_R11,
            gen_op_movl_T0_R12,
            gen_op_movl_T0_R13,
            gen_op_movl_T0_R14,
            gen_op_movl_T0_R15,
#else
            gen_op_movh_T0_EAX,
            gen_op_movh_T0_ECX,
            gen_op_movh_T0_EDX,
            gen_op_movh_T0_EBX,
#endif
        },
        {
            gen_op_movl_T1_EAX,
            gen_op_movl_T1_ECX,
            gen_op_movl_T1_EDX,
            gen_op_movl_T1_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T1_ESP_wrapper,
            gen_op_movl_T1_EBP_wrapper,
            gen_op_movl_T1_ESI_wrapper,
            gen_op_movl_T1_EDI_wrapper,
            gen_op_movl_T1_R8,
            gen_op_movl_T1_R9,
            gen_op_movl_T1_R10,
            gen_op_movl_T1_R11,
            gen_op_movl_T1_R12,
            gen_op_movl_T1_R13,
            gen_op_movl_T1_R14,
            gen_op_movl_T1_R15,
#else
            gen_op_movh_T1_EAX,
            gen_op_movh_T1_ECX,
            gen_op_movh_T1_EDX,
            gen_op_movh_T1_EBX,
#endif
        },
    },
    [OT_WORD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
    [OT_LONG] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#endif
};
446
/* A0 = 32-bit register value (effective-address base). */
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movl_A0_, )
};

/* A0 += reg << scale, indexed [SIB scale 0-3][reg], 32-bit arithmetic. */
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addl_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addl_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addl_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addl_A0_, _s3)
    },
};
465
#ifdef TARGET_X86_64
/* 64-bit counterparts of the A0 address-computation tables above. */
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movq_A0_, )
};

static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addq_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addq_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addq_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addq_A0_, _s3)
    },
};
#endif
486
/* CMOVcc: conditionally move T1 into a register (condition already in T0);
   first index is (ot - OT_WORD). */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};

/* Size-independent logic ops indexed by OP_*; slots for size-dependent or
   carry-using ops (add/adc/sbb/sub/cmp) are NULL and handled in gen_op(). */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};
511
/* ADC/SBB rows for all four sizes; SUFFIX selects the memory-access
   variant (empty = register-only, _raw/_kernel/_user = memory). */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

/* Register-destination ADC/SBB, indexed [ot][op - OP_ADCL]. */
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

/* Memory-destination ADC/SBB, indexed [ot + mem_index][op - OP_ADCL]. */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};
541
/* CC_OP_* base value for each OP_* ALU operation (byte size; add ot for
   the actual size): add/adc -> ADDB, or/and/xor -> LOGICB,
   sbb/sub/cmp -> SUBB. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
552
/* CMPXCHG micro-ops for the four operand sizes (quad on x86_64 only). */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

/* Register destination, indexed by ot. */
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

/* Memory destination, indexed by ot + mem_index. */
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
570
/* Shift/rotate rows in OP_ROL..OP_SAR order for the four sizes.  Slot 6
   (undocumented OP_SHL1) deliberately repeats the SHL op. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

/* Register destination, indexed [ot][shift op]. */
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

/* Memory destination, indexed [ot + mem_index][shift op]. */
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
624
/* SHLD/SHRD rows; no byte form exists, hence the NULL first row.  The
   shift count source is 'op': an immediate (im) or the CL register (ECX). */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
666
/* Bit-test ops BT/BTS/BTR/BTC, indexed [ot - OT_WORD][op]. */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* A0 += byte offset of the bit index in T1 (memory-operand bit tests). */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};
695
/* BSF (index 0) / BSR (index 1) bit scans, indexed [ot - OT_WORD][dir]. */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
712
/* Sign-extending loads into T0, indexed ot + mem_index (4 sizes per access
   mode).  A sign-extended 32-bit load only makes sense on 64-bit targets. */
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
    gen_op_ldsb_raw_T0_A0,
    gen_op_ldsw_raw_T0_A0,
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
    NULL,
#ifndef CONFIG_USER_ONLY
    gen_op_ldsb_kernel_T0_A0,
    gen_op_ldsw_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
    NULL,

    gen_op_ldsb_user_T0_A0,
    gen_op_ldsw_user_T0_A0,
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
    NULL,
#endif
};

/* Zero-extending loads into T0; only byte and word forms are needed. */
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    NULL,
    NULL,

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    NULL,
    NULL,

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    NULL,
    NULL,
#endif
};
749
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Generic loads into T0/T1, indexed ot + mem_index. */
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    gen_op_ldl_raw_T0_A0,
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    gen_op_ldl_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    gen_op_ldl_user_T0_A0,
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
#endif
};

static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
    gen_op_ldub_raw_T1_A0,
    gen_op_lduw_raw_T1_A0,
    gen_op_ldl_raw_T1_A0,
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T1_A0,
    gen_op_lduw_kernel_T1_A0,
    gen_op_ldl_kernel_T1_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),

    gen_op_ldub_user_T1_A0,
    gen_op_lduw_user_T1_A0,
    gen_op_ldl_user_T1_A0,
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
#endif
};
788
/* Stores of T0/T1 to [A0], indexed ot + mem_index.  No byte-store of T1
   is ever needed, hence the NULL slots. */
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
    gen_op_stb_raw_T0_A0,
    gen_op_stw_raw_T0_A0,
    gen_op_stl_raw_T0_A0,
    X86_64_ONLY(gen_op_stq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_stb_kernel_T0_A0,
    gen_op_stw_kernel_T0_A0,
    gen_op_stl_kernel_T0_A0,
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),

    gen_op_stb_user_T0_A0,
    gen_op_stw_user_T0_A0,
    gen_op_stl_user_T0_A0,
    X86_64_ONLY(gen_op_stq_user_T0_A0),
#endif
};

static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
    NULL,
    gen_op_stw_raw_T1_A0,
    gen_op_stl_raw_T1_A0,
    X86_64_ONLY(gen_op_stq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    NULL,
    gen_op_stw_kernel_T1_A0,
    gen_op_stl_kernel_T1_A0,
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),

    NULL,
    gen_op_stw_user_T1_A0,
    gen_op_stl_user_T1_A0,
    X86_64_ONLY(gen_op_stq_user_T1_A0),
#endif
};
826
#ifdef VBOX
/* Emit a micro-op that polls for pending external events (VMM requests,
   interrupts) so translated code can be interrupted at EIP updates;
   called from gen_jmp_im().  Fixed: an empty parameter list '()' declares
   a function with unspecified parameters in C — use '(void)'. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
#endif /* VBOX */
833
/* Emit code that sets EIP/RIP to the known constant 'pc'.  On x86_64 the
   smallest encoding is chosen: 32-bit zero-extended, 32-bit sign-extended,
   or a full 64-bit immediate split into two halves. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    /* VBox polls for external events at every EIP update. */
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        gen_op_movq_eip_im(pc);
    } else {
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
851
/* Emit code computing A0 = source address of a string instruction:
   (seg override or DS) base + (R)ESI, according to the address size.
   In 16-bit mode ESI is masked to 16 bits and the segment base is
   always applied. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment bases are ignored unless overridden. */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
886
/* Emit code computing A0 = destination address of a string instruction:
   always ES:(R)EDI — segment overrides do not apply to the destination. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16-bit: mask EDI and always add the ES base. */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
907
/* T0 = +/- element size depending on EFLAGS.DF, per operand size. */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* Conditional branches on (R)E(CX), indexed by address size (aflag). */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* Decrement the REP counter register, indexed by address size. */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};
932
/* REPZ/REPNZ termination test after SCAS/CMPS, indexed [nz][ot]:
   row 0 jumps while ZF=0 (repz), row 1 while ZF=1 (repnz). */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
947
/* I/O port micro-ops, indexed by operand size (byte/word/long). */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* TSS I/O-permission-bitmap checks; port in T0 or in DX. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
983
984static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
985{
986 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
987 if (s->cc_op != CC_OP_DYNAMIC)
988 gen_op_set_cc_op(s->cc_op);
989 gen_jmp_im(cur_eip);
990 if (use_dx)
991 gen_check_io_DX[ot]();
992 else
993 gen_check_io_T0[ot]();
994 }
995}
996
/* Emit one MOVS iteration: load from DS:(R)ESI, store to ES:(R)EDI, then
   advance both index registers by +/- element size (direction flag). */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1018
1019static inline void gen_update_cc_op(DisasContext *s)
1020{
1021 if (s->cc_op != CC_OP_DYNAMIC) {
1022 gen_op_set_cc_op(s->cc_op);
1023 s->cc_op = CC_OP_DYNAMIC;
1024 }
1025}
1026
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the REP-prefix entry test: if (R)E(CX) is zero, jump past the
   string body to 'next_eip'.  Returns the label (l2) at the skip target,
   which REP loops also use as their loop-back point. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
1041
/* Emit one STOS iteration: store (R)AX to ES:(R)EDI, advance EDI. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1059
/* Emit one LODS iteration: load DS:(R)ESI into (R)AX, advance ESI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1077
/* Emit one SCAS iteration: compare (R)AX with [ES:(R)EDI] (flags only),
   then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1096
/* Emit one CMPS iteration: compare [DS:(R)ESI] with [ES:(R)EDI] (flags
   only), then advance both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1119
/* Emit one INS iteration: read the port in DX to [ES:(R)EDI], advance
   EDI.  A dummy zero is stored first so a write fault is raised before
   the I/O side effect happens (precise exceptions). */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1139
/* Emit one OUTS iteration: write [DS:(R)ESI] to the port in DX, advance
   ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1157
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ builds the REP loop for ops without a termination condition
   (movs/stos/lods/ins/outs); GEN_REPZ2 additionally tests ZF after the
   body for REPZ/REPNZ scas/cmps ('nz' selects the polarity). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1201
/* Condition-code indices matching the low 3 bits of the Jcc/SETcc/CMOVcc
   opcodes (bit 0 of the opcode inverts the condition). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1212
/* Fast conditional jumps usable when flags come from a SUB/CMP, indexed
   [ot][jcc].  NULL slots (O, P) need the slow flag computation; some
   64-bit entries are disabled via BUGGY_64. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
/* LOOPNZ/LOOPZ/LOOP-family branch ops, indexed [address size][opcode
   low bits]; the fourth column (JCXZ) is handled elsewhere. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1276
/* SETcc via the full (slow) flag computation, indexed by JCC_*. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};

/* Fast SETcc usable when flags come from a SUB/CMP; NULL slots (O, P)
   fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1332
/* x87 arithmetic on ST0 with FT0, indexed by the instruction's /r field. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
/* When ST(i) is the destination, sub/subr and div/divr are swapped. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1355
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one group-1 ALU operation (op = OP_*) of size 'ot' with destination
   'd' (register index, or OR_TMP0 for the memory operand at A0).  The
   second source operand is expected in T1.  Tracks the lazy CC state:
   ADC/SBB consume the current flags, so they flush them first and leave
   the state dynamic; the others set a static CC_OP_* code. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* Carry-in ops need the real flags; flush any pending static op. */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP writes flags only; no result is stored. */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1415
1416/* if d == OR_TMP0, it means memory operand (address in A0) */
1417static void gen_inc(DisasContext *s1, int ot, int d, int c)
1418{
1419 if (d != OR_TMP0)
1420 gen_op_mov_TN_reg[ot][0][d]();
1421 else
1422 gen_op_ld_T0_A0[ot + s1->mem_index]();
1423 if (s1->cc_op != CC_OP_DYNAMIC)
1424 gen_op_set_cc_op(s1->cc_op);
1425 if (c > 0) {
1426 gen_op_incl_T0();
1427 s1->cc_op = CC_OP_INCB + ot;
1428 } else {
1429 gen_op_decl_T0();
1430 s1->cc_op = CC_OP_DECB + ot;
1431 }
1432 if (d != OR_TMP0)
1433 gen_op_mov_reg_T0[ot][d]();
1434 else
1435 gen_op_st_T0_A0[ot + s1->mem_index]();
1436 gen_op_update_inc_cc();
1437}
1438
1439static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1440{
1441 if (d != OR_TMP0)
1442 gen_op_mov_TN_reg[ot][0][d]();
1443 else
1444 gen_op_ld_T0_A0[ot + s1->mem_index]();
1445 if (s != OR_TMP1)
1446 gen_op_mov_TN_reg[ot][1][s]();
1447 /* for zero counts, flags are not updated, so must do it dynamically */
1448 if (s1->cc_op != CC_OP_DYNAMIC)
1449 gen_op_set_cc_op(s1->cc_op);
1450
1451 if (d != OR_TMP0)
1452 gen_op_shift_T0_T1_cc[ot][op]();
1453 else
1454 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1455 if (d != OR_TMP0)
1456 gen_op_mov_reg_T0[ot][d]();
1457 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1458}
1459
/* shift by an immediate count: load the count into T1 and reuse the
   generic register-count shift generator */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1466
/* decode a modrm effective address and emit the code that computes it
   into A0.  *reg_ptr/*offset_ptr always receive OR_A0/0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* an explicit segment prefix forces the segment base addition even
       when the 'addseg' optimization would skip it */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base encoding 4 means a SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: 32 bit displacement */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* RIP-relative addressing in 64 bit mode */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP based addresses,
                   DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16 bit addressing: fixed base/index register combinations */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* rm 6 with mod 0 is a bare 16 bit displacement */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff(); /* 16 bit addresses wrap at 64K */
 no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP based modes default to the SS segment */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1656
1657/* used for LEA and MOV AX, mem */
1658static void gen_add_A0_ds_seg(DisasContext *s)
1659{
1660 int override, must_add_seg;
1661 must_add_seg = s->addseg;
1662 override = R_DS;
1663 if (s->override >= 0) {
1664 override = s->override;
1665 must_add_seg = 1;
1666 } else {
1667 override = R_DS;
1668 }
1669 if (must_add_seg) {
1670#ifdef TARGET_X86_64
1671 if (CODE64(s)) {
1672 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1673 } else
1674#endif
1675 {
1676 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1677 }
1678 }
1679}
1680
1681/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1682 OR_TMP0 */
1683static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1684{
1685 int mod, rm, opreg, disp;
1686
1687 mod = (modrm >> 6) & 3;
1688 rm = (modrm & 7) | REX_B(s);
1689 if (mod == 3) {
1690 if (is_store) {
1691 if (reg != OR_TMP0)
1692 gen_op_mov_TN_reg[ot][0][reg]();
1693 gen_op_mov_reg_T0[ot][rm]();
1694 } else {
1695 gen_op_mov_TN_reg[ot][0][rm]();
1696 if (reg != OR_TMP0)
1697 gen_op_mov_reg_T0[ot][reg]();
1698 }
1699 } else {
1700 gen_lea_modrm(s, modrm, &opreg, &disp);
1701 if (is_store) {
1702 if (reg != OR_TMP0)
1703 gen_op_mov_TN_reg[ot][0][reg]();
1704 gen_op_st_T0_A0[ot + s->mem_index]();
1705 } else {
1706 gen_op_ld_T0_A0[ot + s->mem_index]();
1707 if (reg != OR_TMP0)
1708 gen_op_mov_reg_T0[ot][reg]();
1709 }
1710 }
1711}
1712
1713static inline uint32_t insn_get(DisasContext *s, int ot)
1714{
1715 uint32_t ret;
1716
1717 switch(ot) {
1718 case OT_BYTE:
1719 ret = ldub_code(s->pc);
1720 s->pc++;
1721 break;
1722 case OT_WORD:
1723 ret = lduw_code(s->pc);
1724 s->pc += 2;
1725 break;
1726 default:
1727 case OT_LONG:
1728 ret = ldl_code(s->pc);
1729 s->pc += 4;
1730 break;
1731 }
1732 return ret;
1733}
1734
1735static inline int insn_const_size(unsigned int ot)
1736{
1737 if (ot <= OT_LONG)
1738 return 1 << ot;
1739 else
1740 return 4;
1741}
1742
/* emit the end-of-TB jump to 'eip', using direct TB chaining (slot
   'tb_num') when the target lies on the same page as this TB */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return the tb pointer tagged with the chaining slot so the
           caller of the TB can patch the direct jump */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1767
/* generate a conditional jump: to 'val' when the condition holds, to
   'next_eip' otherwise.  'b' encodes the condition number in bits 3..1
   and the inversion flag in bit 0. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        /* fast path: both targets can use direct TB chaining */
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* only ZF and SF tests are cheap for these ops; the size
               class is recovered with the % 4 (the CC_OP_* groups are
               laid out B/W/L/Q) */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast generator: compute the condition into T0 the
               generic way and branch on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap taken / not-taken targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: no TB chaining; set EIP explicitly and exit */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1899
/* generate code computing condition 'b' (same encoding as gen_jcc)
   into T0 as 0/1 */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF are cheap here; size class via % 4 as the
           CC_OP_* groups are laid out B/W/L/Q */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: flush the lazy flags and evaluate the
           condition from them */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1966
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load can fault, so the lazy flags and
           the current EIP must be committed first */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real or vm86 mode: direct selector load, no checks */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1989
1990static inline void gen_stack_update(DisasContext *s, int addend)
1991{
1992#ifdef TARGET_X86_64
1993 if (CODE64(s)) {
1994 if (addend == 8)
1995 gen_op_addq_ESP_8();
1996 else
1997 gen_op_addq_ESP_im(addend);
1998 } else
1999#endif
2000 if (s->ss32) {
2001 if (addend == 2)
2002 gen_op_addl_ESP_2();
2003 else if (addend == 4)
2004 gen_op_addl_ESP_4();
2005 else
2006 gen_op_addl_ESP_im(addend);
2007 } else {
2008 if (addend == 2)
2009 gen_op_addw_ESP_2();
2010 else if (addend == 4)
2011 gen_op_addw_ESP_4();
2012 else
2013 gen_op_addw_ESP_im(addend);
2014 }
2015}
2016
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64 bit mode: 8 byte push, or 2 bytes for 16 bit operand
           size; no segmentation */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented new ESP in T1 for the final
                   writeback; A0 then gets the SS base added */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* update ESP after the store (precise exceptions) */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2056
2057/* generate a push. It depends on ss32, addseg and dflag */
2058/* slower version for T1, only used for call Ev */
2059static void gen_push_T1(DisasContext *s)
2060{
2061#ifdef TARGET_X86_64
2062 if (CODE64(s)) {
2063 gen_op_movq_A0_reg[R_ESP]();
2064 if (s->dflag) {
2065 gen_op_subq_A0_8();
2066 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2067 } else {
2068 gen_op_subq_A0_2();
2069 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2070 }
2071 gen_op_movq_ESP_A0();
2072 } else
2073#endif
2074 {
2075 gen_op_movl_A0_reg[R_ESP]();
2076 if (!s->dflag)
2077 gen_op_subl_A0_2();
2078 else
2079 gen_op_subl_A0_4();
2080 if (s->ss32) {
2081 if (s->addseg) {
2082 gen_op_addl_A0_SS();
2083 }
2084 } else {
2085 gen_op_andl_A0_ffff();
2086 gen_op_addl_A0_SS();
2087 }
2088 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2089
2090 if (s->ss32 && !s->addseg)
2091 gen_op_movl_ESP_A0();
2092 else
2093 gen_stack_update(s, (-2) << s->dflag);
2094 }
2095}
2096
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
    /* step 1: only load the top of stack into T0; ESP is updated
       separately by gen_pop_update() so a fault here leaves the stack
       pointer unchanged */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2118
2119static void gen_pop_update(DisasContext *s)
2120{
2121#ifdef TARGET_X86_64
2122 if (CODE64(s) && s->dflag) {
2123 gen_stack_update(s, 8);
2124 } else
2125#endif
2126 {
2127 gen_stack_update(s, 2 << s->dflag);
2128 }
2129}
2130
/* compute the current stack address into A0 (segmented when 'addseg')
   and keep the raw offset in T1 */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff(); /* 16 bit stack wraps at 64K */
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2140
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    /* A0 = final stack top; T1 keeps the raw offset for the ESP
       writeback once all eight stores are done */
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    /* store EAX..EDI from highest address downwards */
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2159
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    /* T1 = final ESP value, written back after all loads */
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2181
/* generate the ENTER instruction: push EBP, optionally copy 'level'
   nesting frame pointers, set the new EBP and reserve 'esp_addend'
   bytes of locals */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f; /* hardware masks the nesting level to 5 bits */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        /* T1 = address of the saved EBP = new frame pointer */
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        /* final RSP = new RBP - locals - copied frame pointers */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        /* T1 = offset of the saved EBP = new frame pointer */
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        /* final ESP = new EBP - locals - copied frame pointers */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2229
/* raise exception 'trapno' at EIP 'cur_eip': flush the lazy flags,
   record the faulting EIP and end the translation block */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3; /* stop translation */
}
2238
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    /* the second argument is the instruction length, used to compute
       the return address pushed by the interrupt */
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3; /* stop translation */
}
2250
/* flush the lazy flags, record EIP and drop into the debug handler */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3; /* stop translation */
}
2259
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the interrupt-shadow (e.g. after MOV SS) only lasts one
           instruction: clear it on block exit */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: raise the single-step trap */
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        /* T0 = 0 means "no chained TB" for the exit */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2279
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* direct TB chaining allowed */
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* no chaining: set EIP and exit through the generic epilogue */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2299
/* convenience wrapper: jump to eip using chaining slot 0 */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2304
/* load an arbitrary target_ulong immediate into T0; on x86_64 the
   64 bit move is only emitted when the value does not fit in a
   sign-extended 32 bit immediate */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2317
/* same as gen_movtl_T0_im but targets T1 */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2330
/* add a small immediate to A0, using the 64 bit op in long mode */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2340
/* 64 bit load from [A0] into an env field (offset passed as the
   parameter), indexed by access privilege (s->mem_index >> 2) */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2348
/* 64 bit store of an env field to [A0], same indexing as above */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2356
/* 128 bit (octa) load from [A0] into an env field, same indexing */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2364
/* 128 bit (octa) store of an env field to [A0], same indexing */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2372
/* sentinel for table entries that need bespoke decoding in gen_sse() */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* pair of generators: the MMX form and its SSE (XMM) form */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* the packed-single/packed-double/scalar-single/scalar-double quartet
   of an SSE FP operation */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2378
/* dispatch table for 0x0f prefixed SSE/MMX instructions: first index
   is the second opcode byte, second index is the mandatory prefix
   (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2).  SSE_SPECIAL entries are
   decoded by hand in gen_sse(); NULL entries are invalid. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2501
/* MMX/SSE shift-by-immediate ops (opcodes 0x71/0x72/0x73): rows of 8
   per element size (0 = word, 8 = dword, 16 = qword), inner index
   presumably the modrm reg field — see the 0x71..0x73 decode in
   gen_sse().  The dq byte shifts only exist in the XMM form. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2514
/* scalar int<->float conversions in groups of four
   (ss/sd 32 bit, then the x86_64-only 64 bit variants):
   cvtsi2*, cvtt*2si (truncating), cvt*2si (rounding) */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2531
/* SSE compare ops (CMPPS etc.): outer index is the comparison
   predicate immediate (0..7), inner the usual ps/pd/ss/sd quartet */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2542
2543static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2544{
2545 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2546 int modrm, mod, rm, reg, reg_addr, offset_addr;
2547 GenOpFunc2 *sse_op2;
2548 GenOpFunc3 *sse_op3;
2549
2550 b &= 0xff;
2551 if (s->prefix & PREFIX_DATA)
2552 b1 = 1;
2553 else if (s->prefix & PREFIX_REPZ)
2554 b1 = 2;
2555 else if (s->prefix & PREFIX_REPNZ)
2556 b1 = 3;
2557 else
2558 b1 = 0;
2559 sse_op2 = sse_op_table1[b][b1];
2560 if (!sse_op2)
2561 goto illegal_op;
2562 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2563 is_xmm = 1;
2564 } else {
2565 if (b1 == 0) {
2566 /* MMX case */
2567 is_xmm = 0;
2568 } else {
2569 is_xmm = 1;
2570 }
2571 }
2572 /* simple MMX/SSE operation */
2573 if (s->flags & HF_TS_MASK) {
2574 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2575 return;
2576 }
2577 if (s->flags & HF_EM_MASK) {
2578 illegal_op:
2579 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2580 return;
2581 }
2582 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2583 goto illegal_op;
2584 if (b == 0x77) {
2585 /* emms */
2586 gen_op_emms();
2587 return;
2588 }
2589 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2590 the static cpu state) */
2591 if (!is_xmm) {
2592 gen_op_enter_mmx();
2593 }
2594
2595 modrm = ldub_code(s->pc++);
2596 reg = ((modrm >> 3) & 7);
2597 if (is_xmm)
2598 reg |= rex_r;
2599 mod = (modrm >> 6) & 3;
2600 if (sse_op2 == SSE_SPECIAL) {
2601 b |= (b1 << 8);
2602 switch(b) {
2603 case 0x0e7: /* movntq */
2604 if (mod == 3)
2605 goto illegal_op;
2606 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2607 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2608 break;
2609 case 0x1e7: /* movntdq */
2610 case 0x02b: /* movntps */
2611 case 0x12b: /* movntps */
2612 case 0x3f0: /* lddqu */
2613 if (mod == 3)
2614 goto illegal_op;
2615 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2616 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2617 break;
2618 case 0x6e: /* movd mm, ea */
2619 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2620 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2621 break;
2622 case 0x16e: /* movd xmm, ea */
2623 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2624 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2625 break;
2626 case 0x6f: /* movq mm, ea */
2627 if (mod != 3) {
2628 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2629 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2630 } else {
2631 rm = (modrm & 7);
2632 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2633 offsetof(CPUX86State,fpregs[rm].mmx));
2634 }
2635 break;
2636 case 0x010: /* movups */
2637 case 0x110: /* movupd */
2638 case 0x028: /* movaps */
2639 case 0x128: /* movapd */
2640 case 0x16f: /* movdqa xmm, ea */
2641 case 0x26f: /* movdqu xmm, ea */
2642 if (mod != 3) {
2643 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2644 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2645 } else {
2646 rm = (modrm & 7) | REX_B(s);
2647 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2648 offsetof(CPUX86State,xmm_regs[rm]));
2649 }
2650 break;
2651 case 0x210: /* movss xmm, ea */
2652 if (mod != 3) {
2653 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2654 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2655 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2656 gen_op_movl_T0_0();
2657 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2658 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2659 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2660 } else {
2661 rm = (modrm & 7) | REX_B(s);
2662 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2663 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2664 }
2665 break;
2666 case 0x310: /* movsd xmm, ea */
2667 if (mod != 3) {
2668 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2669 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2670 gen_op_movl_T0_0();
2671 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2672 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2673 } else {
2674 rm = (modrm & 7) | REX_B(s);
2675 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2676 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2677 }
2678 break;
2679 case 0x012: /* movlps */
2680 case 0x112: /* movlpd */
2681 if (mod != 3) {
2682 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2683 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2684 } else {
2685 /* movhlps */
2686 rm = (modrm & 7) | REX_B(s);
2687 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2688 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2689 }
2690 break;
2691 case 0x212: /* movsldup */
2692 if (mod != 3) {
2693 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2694 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2695 } else {
2696 rm = (modrm & 7) | REX_B(s);
2697 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2698 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2699 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2700 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2701 }
2702 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2703 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2704 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2705 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2706 break;
2707 case 0x312: /* movddup */
2708 if (mod != 3) {
2709 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2710 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2711 } else {
2712 rm = (modrm & 7) | REX_B(s);
2713 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2714 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2715 }
2716 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2717 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2718 break;
2719 case 0x016: /* movhps */
2720 case 0x116: /* movhpd */
2721 if (mod != 3) {
2722 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2723 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2724 } else {
2725 /* movlhps */
2726 rm = (modrm & 7) | REX_B(s);
2727 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2728 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2729 }
2730 break;
2731 case 0x216: /* movshdup */
2732 if (mod != 3) {
2733 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2734 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2735 } else {
2736 rm = (modrm & 7) | REX_B(s);
2737 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2738 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2739 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2740 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2741 }
2742 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2743 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2744 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2745 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2746 break;
2747 case 0x7e: /* movd ea, mm */
2748 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2749 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2750 break;
2751 case 0x17e: /* movd ea, xmm */
2752 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2753 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2754 break;
2755 case 0x27e: /* movq xmm, ea */
2756 if (mod != 3) {
2757 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2758 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2759 } else {
2760 rm = (modrm & 7) | REX_B(s);
2761 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2762 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2763 }
2764 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2765 break;
2766 case 0x7f: /* movq ea, mm */
2767 if (mod != 3) {
2768 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2769 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2770 } else {
2771 rm = (modrm & 7);
2772 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2773 offsetof(CPUX86State,fpregs[reg].mmx));
2774 }
2775 break;
2776 case 0x011: /* movups */
2777 case 0x111: /* movupd */
2778 case 0x029: /* movaps */
2779 case 0x129: /* movapd */
2780 case 0x17f: /* movdqa ea, xmm */
2781 case 0x27f: /* movdqu ea, xmm */
2782 if (mod != 3) {
2783 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2784 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2785 } else {
2786 rm = (modrm & 7) | REX_B(s);
2787 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2788 offsetof(CPUX86State,xmm_regs[reg]));
2789 }
2790 break;
2791 case 0x211: /* movss ea, xmm */
2792 if (mod != 3) {
2793 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2794 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2795 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2796 } else {
2797 rm = (modrm & 7) | REX_B(s);
2798 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2799 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2800 }
2801 break;
2802 case 0x311: /* movsd ea, xmm */
2803 if (mod != 3) {
2804 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2805 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2806 } else {
2807 rm = (modrm & 7) | REX_B(s);
2808 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2809 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2810 }
2811 break;
2812 case 0x013: /* movlps */
2813 case 0x113: /* movlpd */
2814 if (mod != 3) {
2815 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2816 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2817 } else {
2818 goto illegal_op;
2819 }
2820 break;
2821 case 0x017: /* movhps */
2822 case 0x117: /* movhpd */
2823 if (mod != 3) {
2824 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2825 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2826 } else {
2827 goto illegal_op;
2828 }
2829 break;
2830 case 0x71: /* shift mm, im */
2831 case 0x72:
2832 case 0x73:
2833 case 0x171: /* shift xmm, im */
2834 case 0x172:
2835 case 0x173:
2836 val = ldub_code(s->pc++);
2837 if (is_xmm) {
2838 gen_op_movl_T0_im(val);
2839 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2840 gen_op_movl_T0_0();
2841 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2842 op1_offset = offsetof(CPUX86State,xmm_t0);
2843 } else {
2844 gen_op_movl_T0_im(val);
2845 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2846 gen_op_movl_T0_0();
2847 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2848 op1_offset = offsetof(CPUX86State,mmx_t0);
2849 }
2850 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2851 if (!sse_op2)
2852 goto illegal_op;
2853 if (is_xmm) {
2854 rm = (modrm & 7) | REX_B(s);
2855 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2856 } else {
2857 rm = (modrm & 7);
2858 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2859 }
2860 sse_op2(op2_offset, op1_offset);
2861 break;
2862 case 0x050: /* movmskps */
2863 rm = (modrm & 7) | REX_B(s);
2864 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2865 gen_op_mov_reg_T0[OT_LONG][reg]();
2866 break;
2867 case 0x150: /* movmskpd */
2868 rm = (modrm & 7) | REX_B(s);
2869 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2870 gen_op_mov_reg_T0[OT_LONG][reg]();
2871 break;
2872 case 0x02a: /* cvtpi2ps */
2873 case 0x12a: /* cvtpi2pd */
2874 gen_op_enter_mmx();
2875 if (mod != 3) {
2876 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2877 op2_offset = offsetof(CPUX86State,mmx_t0);
2878 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2879 } else {
2880 rm = (modrm & 7);
2881 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2882 }
2883 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2884 switch(b >> 8) {
2885 case 0x0:
2886 gen_op_cvtpi2ps(op1_offset, op2_offset);
2887 break;
2888 default:
2889 case 0x1:
2890 gen_op_cvtpi2pd(op1_offset, op2_offset);
2891 break;
2892 }
2893 break;
2894 case 0x22a: /* cvtsi2ss */
2895 case 0x32a: /* cvtsi2sd */
2896 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2897 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2898 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2899 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2900 break;
2901 case 0x02c: /* cvttps2pi */
2902 case 0x12c: /* cvttpd2pi */
2903 case 0x02d: /* cvtps2pi */
2904 case 0x12d: /* cvtpd2pi */
2905 gen_op_enter_mmx();
2906 if (mod != 3) {
2907 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2908 op2_offset = offsetof(CPUX86State,xmm_t0);
2909 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2910 } else {
2911 rm = (modrm & 7) | REX_B(s);
2912 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2913 }
2914 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2915 switch(b) {
2916 case 0x02c:
2917 gen_op_cvttps2pi(op1_offset, op2_offset);
2918 break;
2919 case 0x12c:
2920 gen_op_cvttpd2pi(op1_offset, op2_offset);
2921 break;
2922 case 0x02d:
2923 gen_op_cvtps2pi(op1_offset, op2_offset);
2924 break;
2925 case 0x12d:
2926 gen_op_cvtpd2pi(op1_offset, op2_offset);
2927 break;
2928 }
2929 break;
2930 case 0x22c: /* cvttss2si */
2931 case 0x32c: /* cvttsd2si */
2932 case 0x22d: /* cvtss2si */
2933 case 0x32d: /* cvtsd2si */
2934 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2935 if (mod != 3) {
2936 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2937 if ((b >> 8) & 1) {
2938 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
2939 } else {
2940 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2941 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2942 }
2943 op2_offset = offsetof(CPUX86State,xmm_t0);
2944 } else {
2945 rm = (modrm & 7) | REX_B(s);
2946 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2947 }
2948 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
2949 (b & 1) * 4](op2_offset);
2950 gen_op_mov_reg_T0[ot][reg]();
2951 break;
2952 case 0xc4: /* pinsrw */
2953 case 0x1c4:
2954 s->rip_offset = 1;
2955 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2956 val = ldub_code(s->pc++);
2957 if (b1) {
2958 val &= 7;
2959 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2960 } else {
2961 val &= 3;
2962 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2963 }
2964 break;
2965 case 0xc5: /* pextrw */
2966 case 0x1c5:
2967 if (mod != 3)
2968 goto illegal_op;
2969 val = ldub_code(s->pc++);
2970 if (b1) {
2971 val &= 7;
2972 rm = (modrm & 7) | REX_B(s);
2973 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2974 } else {
2975 val &= 3;
2976 rm = (modrm & 7);
2977 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2978 }
2979 reg = ((modrm >> 3) & 7) | rex_r;
2980 gen_op_mov_reg_T0[OT_LONG][reg]();
2981 break;
2982 case 0x1d6: /* movq ea, xmm */
2983 if (mod != 3) {
2984 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2985 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2986 } else {
2987 rm = (modrm & 7) | REX_B(s);
2988 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2989 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2990 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2991 }
2992 break;
2993 case 0x2d6: /* movq2dq */
2994 gen_op_enter_mmx();
2995 rm = (modrm & 7) | REX_B(s);
2996 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2997 offsetof(CPUX86State,fpregs[reg & 7].mmx));
2998 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2999 break;
3000 case 0x3d6: /* movdq2q */
3001 gen_op_enter_mmx();
3002 rm = (modrm & 7);
3003 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3004 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3005 break;
3006 case 0xd7: /* pmovmskb */
3007 case 0x1d7:
3008 if (mod != 3)
3009 goto illegal_op;
3010 if (b1) {
3011 rm = (modrm & 7) | REX_B(s);
3012 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3013 } else {
3014 rm = (modrm & 7);
3015 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3016 }
3017 reg = ((modrm >> 3) & 7) | rex_r;
3018 gen_op_mov_reg_T0[OT_LONG][reg]();
3019 break;
3020 default:
3021 goto illegal_op;
3022 }
3023 } else {
3024 /* generic MMX or SSE operation */
3025 switch(b) {
3026 case 0xf7:
3027 /* maskmov : we must prepare A0 */
3028 if (mod != 3)
3029 goto illegal_op;
3030#ifdef TARGET_X86_64
3031 if (s->aflag == 2) {
3032 gen_op_movq_A0_reg[R_EDI]();
3033 } else
3034#endif
3035 {
3036 gen_op_movl_A0_reg[R_EDI]();
3037 if (s->aflag == 0)
3038 gen_op_andl_A0_ffff();
3039 }
3040 gen_add_A0_ds_seg(s);
3041 break;
3042 case 0x70: /* pshufx insn */
3043 case 0xc6: /* pshufx insn */
3044 case 0xc2: /* compare insns */
3045 s->rip_offset = 1;
3046 break;
3047 default:
3048 break;
3049 }
3050 if (is_xmm) {
3051 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3052 if (mod != 3) {
3053 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3054 op2_offset = offsetof(CPUX86State,xmm_t0);
3055 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
3056 b == 0xc2)) {
3057 /* specific case for SSE single instructions */
3058 if (b1 == 2) {
3059 /* 32 bit access */
3060 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3061 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3062 } else {
3063 /* 64 bit access */
3064 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3065 }
3066 } else {
3067 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3068 }
3069 } else {
3070 rm = (modrm & 7) | REX_B(s);
3071 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3072 }
3073 } else {
3074 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3075 if (mod != 3) {
3076 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3077 op2_offset = offsetof(CPUX86State,mmx_t0);
3078 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3079 } else {
3080 rm = (modrm & 7);
3081 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3082 }
3083 }
3084 switch(b) {
3085 case 0x70: /* pshufx insn */
3086 case 0xc6: /* pshufx insn */
3087 val = ldub_code(s->pc++);
3088 sse_op3 = (GenOpFunc3 *)sse_op2;
3089 sse_op3(op1_offset, op2_offset, val);
3090 break;
3091 case 0xc2:
3092 /* compare insns */
3093 val = ldub_code(s->pc++);
3094 if (val >= 8)
3095 goto illegal_op;
3096 sse_op2 = sse_op_table4[val][b1];
3097 sse_op2(op1_offset, op2_offset);
3098 break;
3099 default:
3100 sse_op2(op1_offset, op2_offset);
3101 break;
3102 }
3103 if (b == 0x2e || b == 0x2f) {
3104 s->cc_op = CC_OP_EFLAGS;
3105 }
3106 }
3107}
3108
3109
3110/* convert one instruction. s->is_jmp is set if the translation must
3111 be stopped. Return the next pc value */
3112static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3113{
3114 int b, prefixes, aflag, dflag;
3115 int shift, ot;
3116 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3117 target_ulong next_eip, tval;
3118 int rex_w, rex_r;
3119
3120 s->pc = pc_start;
3121 prefixes = 0;
3122 aflag = s->code32;
3123 dflag = s->code32;
3124 s->override = -1;
3125 rex_w = -1;
3126 rex_r = 0;
3127#ifdef TARGET_X86_64
3128 s->rex_x = 0;
3129 s->rex_b = 0;
3130 x86_64_hregs = 0;
3131#endif
3132 s->rip_offset = 0; /* for relative ip address */
3133 next_byte:
3134 b = ldub_code(s->pc);
3135 s->pc++;
3136 /* check prefixes */
3137#ifdef TARGET_X86_64
3138 if (CODE64(s)) {
3139 switch (b) {
3140 case 0xf3:
3141 prefixes |= PREFIX_REPZ;
3142 goto next_byte;
3143 case 0xf2:
3144 prefixes |= PREFIX_REPNZ;
3145 goto next_byte;
3146 case 0xf0:
3147 prefixes |= PREFIX_LOCK;
3148 goto next_byte;
3149 case 0x2e:
3150 s->override = R_CS;
3151 goto next_byte;
3152 case 0x36:
3153 s->override = R_SS;
3154 goto next_byte;
3155 case 0x3e:
3156 s->override = R_DS;
3157 goto next_byte;
3158 case 0x26:
3159 s->override = R_ES;
3160 goto next_byte;
3161 case 0x64:
3162 s->override = R_FS;
3163 goto next_byte;
3164 case 0x65:
3165 s->override = R_GS;
3166 goto next_byte;
3167 case 0x66:
3168 prefixes |= PREFIX_DATA;
3169 goto next_byte;
3170 case 0x67:
3171 prefixes |= PREFIX_ADR;
3172 goto next_byte;
3173 case 0x40 ... 0x4f:
3174 /* REX prefix */
3175 rex_w = (b >> 3) & 1;
3176 rex_r = (b & 0x4) << 1;
3177 s->rex_x = (b & 0x2) << 2;
3178 REX_B(s) = (b & 0x1) << 3;
3179 x86_64_hregs = 1; /* select uniform byte register addressing */
3180 goto next_byte;
3181 }
3182 if (rex_w == 1) {
3183 /* 0x66 is ignored if rex.w is set */
3184 dflag = 2;
3185 } else {
3186 if (prefixes & PREFIX_DATA)
3187 dflag ^= 1;
3188 }
3189 if (!(prefixes & PREFIX_ADR))
3190 aflag = 2;
3191 } else
3192#endif
3193 {
3194 switch (b) {
3195 case 0xf3:
3196 prefixes |= PREFIX_REPZ;
3197 goto next_byte;
3198 case 0xf2:
3199 prefixes |= PREFIX_REPNZ;
3200 goto next_byte;
3201 case 0xf0:
3202 prefixes |= PREFIX_LOCK;
3203 goto next_byte;
3204 case 0x2e:
3205 s->override = R_CS;
3206 goto next_byte;
3207 case 0x36:
3208 s->override = R_SS;
3209 goto next_byte;
3210 case 0x3e:
3211 s->override = R_DS;
3212 goto next_byte;
3213 case 0x26:
3214 s->override = R_ES;
3215 goto next_byte;
3216 case 0x64:
3217 s->override = R_FS;
3218 goto next_byte;
3219 case 0x65:
3220 s->override = R_GS;
3221 goto next_byte;
3222 case 0x66:
3223 prefixes |= PREFIX_DATA;
3224 goto next_byte;
3225 case 0x67:
3226 prefixes |= PREFIX_ADR;
3227 goto next_byte;
3228 }
3229 if (prefixes & PREFIX_DATA)
3230 dflag ^= 1;
3231 if (prefixes & PREFIX_ADR)
3232 aflag ^= 1;
3233 }
3234
3235 s->prefix = prefixes;
3236 s->aflag = aflag;
3237 s->dflag = dflag;
3238
3239 /* lock generation */
3240 if (prefixes & PREFIX_LOCK)
3241 gen_op_lock();
3242
3243 /* now check op code */
3244 reswitch:
3245 switch(b) {
3246 case 0x0f:
3247 /**************************/
3248 /* extended op code */
3249 b = ldub_code(s->pc++) | 0x100;
3250 goto reswitch;
3251
3252 /**************************/
3253 /* arith & logic */
3254 case 0x00 ... 0x05:
3255 case 0x08 ... 0x0d:
3256 case 0x10 ... 0x15:
3257 case 0x18 ... 0x1d:
3258 case 0x20 ... 0x25:
3259 case 0x28 ... 0x2d:
3260 case 0x30 ... 0x35:
3261 case 0x38 ... 0x3d:
3262 {
3263 int op, f, val;
3264 op = (b >> 3) & 7;
3265 f = (b >> 1) & 3;
3266
3267 if ((b & 1) == 0)
3268 ot = OT_BYTE;
3269 else
3270 ot = dflag + OT_WORD;
3271
3272 switch(f) {
3273 case 0: /* OP Ev, Gv */
3274 modrm = ldub_code(s->pc++);
3275 reg = ((modrm >> 3) & 7) | rex_r;
3276 mod = (modrm >> 6) & 3;
3277 rm = (modrm & 7) | REX_B(s);
3278 if (mod != 3) {
3279 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3280 opreg = OR_TMP0;
3281 } else if (op == OP_XORL && rm == reg) {
3282 xor_zero:
3283 /* xor reg, reg optimisation */
3284 gen_op_movl_T0_0();
3285 s->cc_op = CC_OP_LOGICB + ot;
3286 gen_op_mov_reg_T0[ot][reg]();
3287 gen_op_update1_cc();
3288 break;
3289 } else {
3290 opreg = rm;
3291 }
3292 gen_op_mov_TN_reg[ot][1][reg]();
3293 gen_op(s, op, ot, opreg);
3294 break;
3295 case 1: /* OP Gv, Ev */
3296 modrm = ldub_code(s->pc++);
3297 mod = (modrm >> 6) & 3;
3298 reg = ((modrm >> 3) & 7) | rex_r;
3299 rm = (modrm & 7) | REX_B(s);
3300 if (mod != 3) {
3301 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3302 gen_op_ld_T1_A0[ot + s->mem_index]();
3303 } else if (op == OP_XORL && rm == reg) {
3304 goto xor_zero;
3305 } else {
3306 gen_op_mov_TN_reg[ot][1][rm]();
3307 }
3308 gen_op(s, op, ot, reg);
3309 break;
3310 case 2: /* OP A, Iv */
3311 val = insn_get(s, ot);
3312 gen_op_movl_T1_im(val);
3313 gen_op(s, op, ot, OR_EAX);
3314 break;
3315 }
3316 }
3317 break;
3318
3319 case 0x80: /* GRP1 */
3320 case 0x81:
3321 case 0x82:
3322 case 0x83:
3323 {
3324 int val;
3325
3326 if ((b & 1) == 0)
3327 ot = OT_BYTE;
3328 else
3329 ot = dflag + OT_WORD;
3330
3331 modrm = ldub_code(s->pc++);
3332 mod = (modrm >> 6) & 3;
3333 rm = (modrm & 7) | REX_B(s);
3334 op = (modrm >> 3) & 7;
3335
3336 if (mod != 3) {
3337 if (b == 0x83)
3338 s->rip_offset = 1;
3339 else
3340 s->rip_offset = insn_const_size(ot);
3341 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3342 opreg = OR_TMP0;
3343 } else {
3344 opreg = rm;
3345 }
3346
3347 switch(b) {
3348 default:
3349 case 0x80:
3350 case 0x81:
3351 case 0x82:
3352 val = insn_get(s, ot);
3353 break;
3354 case 0x83:
3355 val = (int8_t)insn_get(s, OT_BYTE);
3356 break;
3357 }
3358 gen_op_movl_T1_im(val);
3359 gen_op(s, op, ot, opreg);
3360 }
3361 break;
3362
3363 /**************************/
3364 /* inc, dec, and other misc arith */
3365 case 0x40 ... 0x47: /* inc Gv */
3366 ot = dflag ? OT_LONG : OT_WORD;
3367 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3368 break;
3369 case 0x48 ... 0x4f: /* dec Gv */
3370 ot = dflag ? OT_LONG : OT_WORD;
3371 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3372 break;
3373 case 0xf6: /* GRP3 */
3374 case 0xf7:
3375 if ((b & 1) == 0)
3376 ot = OT_BYTE;
3377 else
3378 ot = dflag + OT_WORD;
3379
3380 modrm = ldub_code(s->pc++);
3381 mod = (modrm >> 6) & 3;
3382 rm = (modrm & 7) | REX_B(s);
3383 op = (modrm >> 3) & 7;
3384 if (mod != 3) {
3385 if (op == 0)
3386 s->rip_offset = insn_const_size(ot);
3387 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3388 gen_op_ld_T0_A0[ot + s->mem_index]();
3389 } else {
3390 gen_op_mov_TN_reg[ot][0][rm]();
3391 }
3392
3393 switch(op) {
3394 case 0: /* test */
3395 val = insn_get(s, ot);
3396 gen_op_movl_T1_im(val);
3397 gen_op_testl_T0_T1_cc();
3398 s->cc_op = CC_OP_LOGICB + ot;
3399 break;
3400 case 2: /* not */
3401 gen_op_notl_T0();
3402 if (mod != 3) {
3403 gen_op_st_T0_A0[ot + s->mem_index]();
3404 } else {
3405 gen_op_mov_reg_T0[ot][rm]();
3406 }
3407 break;
3408 case 3: /* neg */
3409 gen_op_negl_T0();
3410 if (mod != 3) {
3411 gen_op_st_T0_A0[ot + s->mem_index]();
3412 } else {
3413 gen_op_mov_reg_T0[ot][rm]();
3414 }
3415 gen_op_update_neg_cc();
3416 s->cc_op = CC_OP_SUBB + ot;
3417 break;
3418 case 4: /* mul */
3419 switch(ot) {
3420 case OT_BYTE:
3421 gen_op_mulb_AL_T0();
3422 s->cc_op = CC_OP_MULB;
3423 break;
3424 case OT_WORD:
3425 gen_op_mulw_AX_T0();
3426 s->cc_op = CC_OP_MULW;
3427 break;
3428 default:
3429 case OT_LONG:
3430 gen_op_mull_EAX_T0();
3431 s->cc_op = CC_OP_MULL;
3432 break;
3433#ifdef TARGET_X86_64
3434 case OT_QUAD:
3435 gen_op_mulq_EAX_T0();
3436 s->cc_op = CC_OP_MULQ;
3437 break;
3438#endif
3439 }
3440 break;
3441 case 5: /* imul */
3442 switch(ot) {
3443 case OT_BYTE:
3444 gen_op_imulb_AL_T0();
3445 s->cc_op = CC_OP_MULB;
3446 break;
3447 case OT_WORD:
3448 gen_op_imulw_AX_T0();
3449 s->cc_op = CC_OP_MULW;
3450 break;
3451 default:
3452 case OT_LONG:
3453 gen_op_imull_EAX_T0();
3454 s->cc_op = CC_OP_MULL;
3455 break;
3456#ifdef TARGET_X86_64
3457 case OT_QUAD:
3458 gen_op_imulq_EAX_T0();
3459 s->cc_op = CC_OP_MULQ;
3460 break;
3461#endif
3462 }
3463 break;
3464 case 6: /* div */
3465 switch(ot) {
3466 case OT_BYTE:
3467 gen_jmp_im(pc_start - s->cs_base);
3468 gen_op_divb_AL_T0();
3469 break;
3470 case OT_WORD:
3471 gen_jmp_im(pc_start - s->cs_base);
3472 gen_op_divw_AX_T0();
3473 break;
3474 default:
3475 case OT_LONG:
3476 gen_jmp_im(pc_start - s->cs_base);
3477 gen_op_divl_EAX_T0();
3478 break;
3479#ifdef TARGET_X86_64
3480 case OT_QUAD:
3481 gen_jmp_im(pc_start - s->cs_base);
3482 gen_op_divq_EAX_T0();
3483 break;
3484#endif
3485 }
3486 break;
3487 case 7: /* idiv */
3488 switch(ot) {
3489 case OT_BYTE:
3490 gen_jmp_im(pc_start - s->cs_base);
3491 gen_op_idivb_AL_T0();
3492 break;
3493 case OT_WORD:
3494 gen_jmp_im(pc_start - s->cs_base);
3495 gen_op_idivw_AX_T0();
3496 break;
3497 default:
3498 case OT_LONG:
3499 gen_jmp_im(pc_start - s->cs_base);
3500 gen_op_idivl_EAX_T0();
3501 break;
3502#ifdef TARGET_X86_64
3503 case OT_QUAD:
3504 gen_jmp_im(pc_start - s->cs_base);
3505 gen_op_idivq_EAX_T0();
3506 break;
3507#endif
3508 }
3509 break;
3510 default:
3511 goto illegal_op;
3512 }
3513 break;
3514
3515 case 0xfe: /* GRP4 */
3516 case 0xff: /* GRP5 */
3517 if ((b & 1) == 0)
3518 ot = OT_BYTE;
3519 else
3520 ot = dflag + OT_WORD;
3521
3522 modrm = ldub_code(s->pc++);
3523 mod = (modrm >> 6) & 3;
3524 rm = (modrm & 7) | REX_B(s);
3525 op = (modrm >> 3) & 7;
3526 if (op >= 2 && b == 0xfe) {
3527 goto illegal_op;
3528 }
3529 if (CODE64(s)) {
3530 if (op == 2 || op == 4) {
3531 /* operand size for jumps is 64 bit */
3532 ot = OT_QUAD;
3533 } else if (op == 3 || op == 5) {
3534 /* for call calls, the operand is 16 or 32 bit, even
3535 in long mode */
3536 ot = dflag ? OT_LONG : OT_WORD;
3537 } else if (op == 6) {
3538 /* default push size is 64 bit */
3539 ot = dflag ? OT_QUAD : OT_WORD;
3540 }
3541 }
3542 if (mod != 3) {
3543 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3544 if (op >= 2 && op != 3 && op != 5)
3545 gen_op_ld_T0_A0[ot + s->mem_index]();
3546 } else {
3547 gen_op_mov_TN_reg[ot][0][rm]();
3548 }
3549
3550 switch(op) {
3551 case 0: /* inc Ev */
3552 if (mod != 3)
3553 opreg = OR_TMP0;
3554 else
3555 opreg = rm;
3556 gen_inc(s, ot, opreg, 1);
3557 break;
3558 case 1: /* dec Ev */
3559 if (mod != 3)
3560 opreg = OR_TMP0;
3561 else
3562 opreg = rm;
3563 gen_inc(s, ot, opreg, -1);
3564 break;
3565 case 2: /* call Ev */
3566 /* XXX: optimize if memory (no 'and' is necessary) */
3567 if (s->dflag == 0)
3568 gen_op_andl_T0_ffff();
3569 next_eip = s->pc - s->cs_base;
3570 gen_movtl_T1_im(next_eip);
3571 gen_push_T1(s);
3572 gen_op_jmp_T0();
3573 gen_eob(s);
3574 break;
3575 case 3: /* lcall Ev */
3576 gen_op_ld_T1_A0[ot + s->mem_index]();
3577 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3578 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3579 do_lcall:
3580 if (s->pe && !s->vm86) {
3581 if (s->cc_op != CC_OP_DYNAMIC)
3582 gen_op_set_cc_op(s->cc_op);
3583 gen_jmp_im(pc_start - s->cs_base);
3584 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3585 } else {
3586 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3587 }
3588 gen_eob(s);
3589 break;
3590 case 4: /* jmp Ev */
3591 if (s->dflag == 0)
3592 gen_op_andl_T0_ffff();
3593 gen_op_jmp_T0();
3594 gen_eob(s);
3595 break;
3596 case 5: /* ljmp Ev */
3597 gen_op_ld_T1_A0[ot + s->mem_index]();
3598 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3599 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3600 do_ljmp:
3601 if (s->pe && !s->vm86) {
3602 if (s->cc_op != CC_OP_DYNAMIC)
3603 gen_op_set_cc_op(s->cc_op);
3604 gen_jmp_im(pc_start - s->cs_base);
3605 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3606 } else {
3607 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3608 gen_op_movl_T0_T1();
3609 gen_op_jmp_T0();
3610 }
3611 gen_eob(s);
3612 break;
3613 case 6: /* push Ev */
3614 gen_push_T0(s);
3615 break;
3616 default:
3617 goto illegal_op;
3618 }
3619 break;
3620
3621 case 0x84: /* test Ev, Gv */
3622 case 0x85:
3623 if ((b & 1) == 0)
3624 ot = OT_BYTE;
3625 else
3626 ot = dflag + OT_WORD;
3627
3628 modrm = ldub_code(s->pc++);
3629 mod = (modrm >> 6) & 3;
3630 rm = (modrm & 7) | REX_B(s);
3631 reg = ((modrm >> 3) & 7) | rex_r;
3632
3633 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3634 gen_op_mov_TN_reg[ot][1][reg]();
3635 gen_op_testl_T0_T1_cc();
3636 s->cc_op = CC_OP_LOGICB + ot;
3637 break;
3638
3639 case 0xa8: /* test eAX, Iv */
3640 case 0xa9:
3641 if ((b & 1) == 0)
3642 ot = OT_BYTE;
3643 else
3644 ot = dflag + OT_WORD;
3645 val = insn_get(s, ot);
3646
3647 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3648 gen_op_movl_T1_im(val);
3649 gen_op_testl_T0_T1_cc();
3650 s->cc_op = CC_OP_LOGICB + ot;
3651 break;
3652
3653 case 0x98: /* CWDE/CBW */
3654#ifdef TARGET_X86_64
3655 if (dflag == 2) {
3656 gen_op_movslq_RAX_EAX();
3657 } else
3658#endif
3659 if (dflag == 1)
3660 gen_op_movswl_EAX_AX();
3661 else
3662 gen_op_movsbw_AX_AL();
3663 break;
3664 case 0x99: /* CDQ/CWD */
3665#ifdef TARGET_X86_64
3666 if (dflag == 2) {
3667 gen_op_movsqo_RDX_RAX();
3668 } else
3669#endif
3670 if (dflag == 1)
3671 gen_op_movslq_EDX_EAX();
3672 else
3673 gen_op_movswl_DX_AX();
3674 break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        /* Two/three-operand IMUL. 0x69 takes a full-size immediate,
           0x6b a sign-extended 8-bit immediate, 0x1af the reg operand. */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* rip_offset: bytes of immediate still to come after the modrm
           bytes, needed for RIP-relative addressing in gen_lea_modrm */
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);   /* T0 = Ev */
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);                 /* T1 = imm */
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);                 /* T1 = sign-extended imm8 */
        } else {
            gen_op_mov_TN_reg[ot][1][reg]();        /* T1 = Gv */
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        } else
#endif
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
        } else {
            gen_op_imulw_T0_T1();
        }
        gen_op_mov_reg_T0[ot][reg]();
        s->cc_op = CC_OP_MULB + ot;   /* flags computed lazily from MUL result */
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            /* register destination: exchange then add */
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1[ot][reg]();   /* reg gets old dest value */
            gen_op_mov_reg_T0[ot][rm]();    /* dest gets the sum */
        } else {
            /* memory destination */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0[ot + s->mem_index]();
            gen_op_mov_reg_T1[ot][reg]();
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg[ot][1][reg]();    /* T1 = replacement value */
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][rm]();
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        }
        s->cc_op = CC_OP_SUBB + ot;   /* flags as for CMP dest, accumulator */
        break;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)                 /* register form is undefined */
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_cmpxchg8b();
        s->cc_op = CC_OP_EFLAGS;      /* helper computes eflags directly */
        break;
3769
    /**************************/
    /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        /* in 64-bit mode the operand size is 64 or 16, never 32 */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);   /* 0x6a: sign-extended imm8 */
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;   /* address calc must see ESP pre-pop */
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            val = lduw_code(s->pc);       /* frame size (imm16) */
            s->pc += 2;
            level = ldub_code(s->pc++);   /* nesting level (imm8) */
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        /* SP := BP, sized by the stack segment, then pop BP */
        if (CODE64(s)) {
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
        } else if (s->ss32) {
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
        } else {
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0[ot][R_EBP]();
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);   /* segment index encoded in opcode bits 3-4 */
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            /* segment load may have ended the TB (e.g. CPL change) */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
3909
    /**************************/
    /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            /* immediate follows the addressing bytes: tell the RIP-relative
               address computation how far away the insn end is */
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0[ot + s->mem_index]();
        else
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0[ot][reg]();
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)   /* CS cannot be the destination */
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        /* memory destination is always 16 bits; register may be wider */
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;

    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg[ot][0][rm]();
                /* bit 3 of b distinguishes movzx (clear) from movsx (set) */
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    gen_op_movzbl_T0_T0();
                    break;
                case OT_BYTE | 8:
                    gen_op_movsbl_T0_T0();
                    break;
                case OT_WORD:
                    gen_op_movzwl_T0_T0();
                    break;
                default:
                case OT_WORD | 8:
                    gen_op_movswl_T0_T0();
                    break;
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0[ot + s->mem_index]();   /* signed load */
                } else {
                    gen_op_ldu_T0_A0[ot + s->mem_index]();   /* unsigned load */
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            }
        }
        break;

    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;   /* restore segment-add mode after the bare EA calc */
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
        break;

    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            /* moffs forms: absolute offset follows the opcode directly
               (shadows the outer offset_addr used by gen_lea_modrm) */
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                /* 64-bit address size: full 8-byte offset */
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                if (offset_addr == (int32_t)offset_addr)
                    gen_op_movq_A0_im(offset_addr);
                else
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            if ((b & 2) == 0) {
                /* 0xa0/0xa1: load accumulator from memory */
                gen_op_ld_T0_A0[ot + s->mem_index]();
                gen_op_mov_reg_T0[ot][R_EAX]();
            } else {
                /* 0xa2/0xa3: store accumulator to memory */
                gen_op_mov_TN_reg[ot][0][R_EAX]();
                gen_op_st_T0_A0[ot + s->mem_index]();
            }
        }
        break;
    case 0xd7: /* xlat */
        /* AL := [DS:(E/R)BX + AL] */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg[R_EBX]();
            gen_op_addq_A0_AL();
        } else
#endif
        {
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_AL();
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();   /* 16-bit address size wraps */
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0[OT_QUAD][reg]();
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0[ot][reg]();
        }
        break;
4131
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            /* pure register exchange: no bus lock needed */
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_mov_reg_T0[ot][rm]();
            gen_op_mov_reg_T1[ot][reg]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                gen_op_lock();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_st_T0_A0[ot + s->mem_index]();
            if (!(prefixes & PREFIX_LOCK))
                gen_op_unlock();
            gen_op_mov_reg_T1[ot][reg]();
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        /* load far pointer: offset then 16-bit selector from memory */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0[ot + s->mem_index]();
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));   /* skip past the offset */
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1[ot][reg]();
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
4204
    /************************/
    /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        /* shift selects the count source: 0 = CL, 1 = constant 1, 2 = imm8 */
        shift = 2;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7;   /* rol/ror/rcl/rcr/shl/shr/sal/sar */

            if (mod != 3) {
                if (shift == 2) {
                    s->rip_offset = 1;   /* imm8 count follows the modrm bytes */
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;

    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        /* double precision shift: op 0 = shld, 1 = shrd;
           shift 1 = imm8 count, 0 = count in CL */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
        }
        gen_op_mov_TN_reg[ot][1][reg]();

        if (shift) {
            val = ldub_code(s->pc++);
            /* count is masked to the operand width */
            if (ot == OT_QUAD)
                val &= 0x3f;
            else
                val &= 0x1f;
            if (val) {   /* zero count: no operation, flags untouched */
                if (mod == 3)
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
                else
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
                if (op == 0 && ot != OT_WORD)
                    s->cc_op = CC_OP_SHLB + ot;
                else
                    s->cc_op = CC_OP_SARB + ot;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (mod == 3)
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
            else
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
        }
        if (mod == 3) {
            gen_op_mov_reg_T0[ot][rm]();
        }
        break;
4313
    /************************/
    /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        /* 6-bit FPU opcode index: low 3 bits of the escape byte
           concatenated with the modrm reg field */
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;   /* arithmetic op selector */

                    /* load the memory operand into FT0 with the format
                       implied by the escape byte */
                    switch(op >> 4) {
                    case 0:
                        gen_op_flds_FT0_A0();    /* float32 */
                        break;
                    case 1:
                        gen_op_fildl_FT0_A0();   /* int32 */
                        break;
                    case 2:
                        gen_op_fldl_FT0_A0();    /* float64 */
                        break;
                    case 3:
                    default:
                        gen_op_fild_FT0_A0();    /* int16 */
                        break;
                    }

                    gen_op_fp_arith_ST0_FT0[op1]();
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        gen_op_fpop();
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18: /* fildl */
            case 0x1a: /* fistl */
            case 0x1b: /* fistpl */
            case 0x28: /* fldl */
            case 0x2a: /* fstl */
            case 0x2b: /* fstpl */
            case 0x38: /* filds */
            case 0x3a: /* fists */
            case 0x3b: /* fistps */

                /* loads (op&7 == 0) vs stores, sized by op>>4 as above */
                switch(op & 7) {
                case 0:
                    switch(op >> 4) {
                    case 0:
                        gen_op_flds_ST0_A0();
                        break;
                    case 1:
                        gen_op_fildl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fldl_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fild_ST0_A0();
                        break;
                    }
                    break;
                default:
                    switch(op >> 4) {
                    case 0:
                        gen_op_fsts_ST0_A0();
                        break;
                    case 1:
                        gen_op_fistl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fstl_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fist_ST0_A0();
                        break;
                    }
                    if ((op & 7) == 3)   /* fstp variants pop the stack */
                        gen_op_fpop();
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                gen_op_fldenv_A0(s->dflag);
                break;
            case 0x0d: /* fldcw mem */
                gen_op_fldcw_A0();
                break;
            case 0x0e: /* fnstenv mem */
                gen_op_fnstenv_A0(s->dflag);
                break;
            case 0x0f: /* fnstcw mem */
                gen_op_fnstcw_A0();
                break;
            case 0x1d: /* fldt mem */
                gen_op_fldt_ST0_A0();
                break;
            case 0x1f: /* fstpt mem */
                gen_op_fstt_ST0_A0();
                gen_op_fpop();
                break;
            case 0x2c: /* frstor mem */
                gen_op_frstor_A0(s->dflag);
                break;
            case 0x2e: /* fnsave mem */
                gen_op_fnsave_A0(s->dflag);
                break;
            case 0x2f: /* fnstsw mem */
                gen_op_fnstsw_A0();
                break;
            case 0x3c: /* fbld */
                gen_op_fbld_ST0_A0();
                break;
            case 0x3e: /* fbstp */
                gen_op_fbst_ST0_A0();
                gen_op_fpop();
                break;
            case 0x3d: /* fildll */
                gen_op_fildll_ST0_A0();
                break;
            case 0x3f: /* fistpll */
                gen_op_fistll_ST0_A0();
                gen_op_fpop();
                break;
            default:
                goto illegal_op;
            }
        } else {
            /* register float ops */
            opreg = rm;   /* ST(i) index from the modrm rm field */

            switch(op) {
            case 0x08: /* fld sti */
                gen_op_fpush();
                /* +1 because fpush already moved the stack top */
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                gen_op_fxchg_ST0_STN(opreg);
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_op_fwait();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    gen_op_fchs_ST0();
                    break;
                case 1: /* fabs */
                    gen_op_fabs_ST0();
                    break;
                case 4: /* ftst */
                    gen_op_fldz_FT0();
                    gen_op_fcom_ST0_FT0();
                    break;
                case 5: /* fxam */
                    gen_op_fxam_ST0();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5 */
                {
                    /* load one of the x87 constants onto the stack */
                    switch(rm) {
                    case 0:
                        gen_op_fpush();
                        gen_op_fld1_ST0();
                        break;
                    case 1:
                        gen_op_fpush();
                        gen_op_fldl2t_ST0();
                        break;
                    case 2:
                        gen_op_fpush();
                        gen_op_fldl2e_ST0();
                        break;
                    case 3:
                        gen_op_fpush();
                        gen_op_fldpi_ST0();
                        break;
                    case 4:
                        gen_op_fpush();
                        gen_op_fldlg2_ST0();
                        break;
                    case 5:
                        gen_op_fpush();
                        gen_op_fldln2_ST0();
                        break;
                    case 6:
                        gen_op_fpush();
                        gen_op_fldz_ST0();
                        break;
                    default:
                        goto illegal_op;
                    }
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    gen_op_f2xm1();
                    break;
                case 1: /* fyl2x */
                    gen_op_fyl2x();
                    break;
                case 2: /* fptan */
                    gen_op_fptan();
                    break;
                case 3: /* fpatan */
                    gen_op_fpatan();
                    break;
                case 4: /* fxtract */
                    gen_op_fxtract();
                    break;
                case 5: /* fprem1 */
                    gen_op_fprem1();
                    break;
                case 6: /* fdecstp */
                    gen_op_fdecstp();
                    break;
                default:
                case 7: /* fincstp */
                    gen_op_fincstp();
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    gen_op_fprem();
                    break;
                case 1: /* fyl2xp1 */
                    gen_op_fyl2xp1();
                    break;
                case 2: /* fsqrt */
                    gen_op_fsqrt();
                    break;
                case 3: /* fsincos */
                    gen_op_fsincos();
                    break;
                case 5: /* fscale */
                    gen_op_fscale();
                    break;
                case 4: /* frndint */
                    gen_op_frndint();
                    break;
                case 6: /* fsin */
                    gen_op_fsin();
                    break;
                default:
                case 7: /* fcos */
                    gen_op_fcos();
                    break;
                }
                break;
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7;
                    if (op >= 0x20) {
                        /* destination is ST(i); >= 0x30 also pops */
                        gen_op_fp_arith_STN_ST0[op1](opreg);
                        if (op >= 0x30)
                            gen_op_fpop();
                    } else {
                        /* destination is ST(0) */
                        gen_op_fmov_FT0_STN(opreg);
                        gen_op_fp_arith_ST0_FT0[op1]();
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcom_ST0_FT0();
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcom_ST0_FT0();
                gen_op_fpop();
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    gen_op_fmov_FT0_STN(1);
                    gen_op_fucom_ST0_FT0();
                    gen_op_fpop();
                    gen_op_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    gen_op_fclex();
                    break;
                case 3: /* fninit */
                    gen_op_fninit();
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                /* compares that write EFLAGS directly */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                gen_op_ffree_STN(opreg);
                break;
            case 0x2a: /* fst sti */
                gen_op_fmov_STN_ST0(opreg);
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                gen_op_fmov_STN_ST0(opreg);
                gen_op_fpop();
                break;
            case 0x2c: /* fucom st(i) */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucom_ST0_FT0();
                break;
            case 0x2d: /* fucomp st(i) */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucom_ST0_FT0();
                gen_op_fpop();
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    gen_op_fmov_FT0_STN(1);
                    gen_op_fcom_ST0_FT0();
                    gen_op_fpop();
                    gen_op_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                gen_op_ffree_STN(opreg);
                gen_op_fpop();
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    gen_op_fnstsw_EAX();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucomi_ST0_FT0();
                gen_op_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcomi_ST0_FT0();
                gen_op_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1;
                    /* map the fcmov condition to the jcc condition codes;
                       bit 0 (from op bit 3) selects the negated form */
                    const static uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
                    gen_setcc(s, op1);
                    gen_op_fcmov_ST0_STN_T0(opreg);
                }
                break;
            default:
                goto illegal_op;
            }
        }
#ifdef USE_CODE_COPY
        s->tb->cflags |= CF_TB_FP_USED;
#endif
        break;
    /************************/
    /* string ops */

    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        /* with a REP prefix, generate the repeat loop form */
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;

    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        /* REPNZ and REPZ terminate on opposite ZF conditions */
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;   /* I/O is at most 32 bits */
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
        }
        break;
4857
    /************************/
    /* port I/O */
    case 0xe4:   /* in AL/eAX, imm8 */
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);   /* port number from imm8 */
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);   /* IOPL/TSS bitmap check */
        gen_op_in[ot]();
        gen_op_mov_reg_T1[ot][R_EAX]();
        break;
    case 0xe6:   /* out imm8, AL/eAX */
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
        if (val == 0x80)
            break;
#endif /* VBOX */
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        gen_op_out[ot]();
        break;
    case 0xec:   /* in AL/eAX, DX */
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();   /* port number is 16 bits */
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_in[ot]();
        gen_op_mov_reg_T1[ot][R_EAX]();
        break;
    case 0xee:   /* out DX, AL/eAX */
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        gen_op_out[ot]();
        break;
4912
    /************************/
    /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);   /* imm16: extra bytes to release from stack */
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;   /* near ret defaults to 64-bit operand in long mode */
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            /* protected mode: privilege checks done in the helper */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_lret_protected(s->dflag, val);
        } else {
            /* real/vm86 mode: pop EIP and CS inline */
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        if (!s->pe) {
            /* real mode */
            gen_op_iret_real(s->dflag);
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_op_iret_real(s->dflag);
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
    case 0xe8: /* call im */
        {
            /* relative displacement, sign-extended, added to next EIP */
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);   /* push the return address */
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;   /* displacement relative to next insn */
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);   /* condition encoded in opcode b */
        break;

    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_setcc(s, b);   /* T0 = condition result */
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0[ot + s->mem_index]();
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][1][rm]();
        }
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
        break;
5083
5084 /************************/
5085 /* flags */
5086 case 0x9c: /* pushf */
5087 if (s->vm86 && s->iopl != 3) {
5088 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5089 } else {
5090 if (s->cc_op != CC_OP_DYNAMIC)
5091 gen_op_set_cc_op(s->cc_op);
5092 gen_op_movl_T0_eflags();
5093 gen_push_T0(s);
5094 }
5095 break;
5096 case 0x9d: /* popf */
5097 if (s->vm86 && s->iopl != 3) {
5098 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5099 } else {
5100 gen_pop_T0(s);
5101 if (s->cpl == 0) {
5102 if (s->dflag) {
5103 gen_op_movl_eflags_T0_cpl0();
5104 } else {
5105 gen_op_movw_eflags_T0_cpl0();
5106 }
5107 } else {
5108 if (s->cpl <= s->iopl) {
5109 if (s->dflag) {
5110 gen_op_movl_eflags_T0_io();
5111 } else {
5112 gen_op_movw_eflags_T0_io();
5113 }
5114 } else {
5115 if (s->dflag) {
5116 gen_op_movl_eflags_T0();
5117 } else {
5118 gen_op_movw_eflags_T0();
5119 }
5120 }
5121 }
5122 gen_pop_update(s);
5123 s->cc_op = CC_OP_EFLAGS;
5124 /* abort translation because TF flag may change */
5125 gen_jmp_im(s->pc - s->cs_base);
5126 gen_eob(s);
5127 }
5128 break;
5129 case 0x9e: /* sahf */
5130 if (CODE64(s))
5131 goto illegal_op;
5132 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5133 if (s->cc_op != CC_OP_DYNAMIC)
5134 gen_op_set_cc_op(s->cc_op);
5135 gen_op_movb_eflags_T0();
5136 s->cc_op = CC_OP_EFLAGS;
5137 break;
5138 case 0x9f: /* lahf */
5139 if (CODE64(s))
5140 goto illegal_op;
5141 if (s->cc_op != CC_OP_DYNAMIC)
5142 gen_op_set_cc_op(s->cc_op);
5143 gen_op_movl_T0_eflags();
5144 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5145 break;
5146 case 0xf5: /* cmc */
5147 if (s->cc_op != CC_OP_DYNAMIC)
5148 gen_op_set_cc_op(s->cc_op);
5149 gen_op_cmc();
5150 s->cc_op = CC_OP_EFLAGS;
5151 break;
5152 case 0xf8: /* clc */
5153 if (s->cc_op != CC_OP_DYNAMIC)
5154 gen_op_set_cc_op(s->cc_op);
5155 gen_op_clc();
5156 s->cc_op = CC_OP_EFLAGS;
5157 break;
5158 case 0xf9: /* stc */
5159 if (s->cc_op != CC_OP_DYNAMIC)
5160 gen_op_set_cc_op(s->cc_op);
5161 gen_op_stc();
5162 s->cc_op = CC_OP_EFLAGS;
5163 break;
5164 case 0xfc: /* cld */
5165 gen_op_cld();
5166 break;
5167 case 0xfd: /* std */
5168 gen_op_std();
5169 break;
5170
5171 /************************/
5172 /* bit operations */
5173 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5174 ot = dflag + OT_WORD;
5175 modrm = ldub_code(s->pc++);
5176 op = (modrm >> 3) & 7;
5177 mod = (modrm >> 6) & 3;
5178 rm = (modrm & 7) | REX_B(s);
5179 if (mod != 3) {
5180 s->rip_offset = 1;
5181 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5182 gen_op_ld_T0_A0[ot + s->mem_index]();
5183 } else {
5184 gen_op_mov_TN_reg[ot][0][rm]();
5185 }
5186 /* load shift */
5187 val = ldub_code(s->pc++);
5188 gen_op_movl_T1_im(val);
5189 if (op < 4)
5190 goto illegal_op;
5191 op -= 4;
5192 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5193 s->cc_op = CC_OP_SARB + ot;
5194 if (op != 0) {
5195 if (mod != 3)
5196 gen_op_st_T0_A0[ot + s->mem_index]();
5197 else
5198 gen_op_mov_reg_T0[ot][rm]();
5199 gen_op_update_bt_cc();
5200 }
5201 break;
5202 case 0x1a3: /* bt Gv, Ev */
5203 op = 0;
5204 goto do_btx;
5205 case 0x1ab: /* bts */
5206 op = 1;
5207 goto do_btx;
5208 case 0x1b3: /* btr */
5209 op = 2;
5210 goto do_btx;
5211 case 0x1bb: /* btc */
5212 op = 3;
5213 do_btx:
5214 ot = dflag + OT_WORD;
5215 modrm = ldub_code(s->pc++);
5216 reg = ((modrm >> 3) & 7) | rex_r;
5217 mod = (modrm >> 6) & 3;
5218 rm = (modrm & 7) | REX_B(s);
5219 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5220 if (mod != 3) {
5221 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5222 /* specific case: we need to add a displacement */
5223 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5224 gen_op_ld_T0_A0[ot + s->mem_index]();
5225 } else {
5226 gen_op_mov_TN_reg[ot][0][rm]();
5227 }
5228 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5229 s->cc_op = CC_OP_SARB + ot;
5230 if (op != 0) {
5231 if (mod != 3)
5232 gen_op_st_T0_A0[ot + s->mem_index]();
5233 else
5234 gen_op_mov_reg_T0[ot][rm]();
5235 gen_op_update_bt_cc();
5236 }
5237 break;
5238 case 0x1bc: /* bsf */
5239 case 0x1bd: /* bsr */
5240 ot = dflag + OT_WORD;
5241 modrm = ldub_code(s->pc++);
5242 reg = ((modrm >> 3) & 7) | rex_r;
5243 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5244 /* NOTE: in order to handle the 0 case, we must load the
5245 result. It could be optimized with a generated jump */
5246 gen_op_mov_TN_reg[ot][1][reg]();
5247 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5248 gen_op_mov_reg_T1[ot][reg]();
5249 s->cc_op = CC_OP_LOGICB + ot;
5250 break;
5251 /************************/
5252 /* bcd */
5253 case 0x27: /* daa */
5254 if (CODE64(s))
5255 goto illegal_op;
5256 if (s->cc_op != CC_OP_DYNAMIC)
5257 gen_op_set_cc_op(s->cc_op);
5258 gen_op_daa();
5259 s->cc_op = CC_OP_EFLAGS;
5260 break;
5261 case 0x2f: /* das */
5262 if (CODE64(s))
5263 goto illegal_op;
5264 if (s->cc_op != CC_OP_DYNAMIC)
5265 gen_op_set_cc_op(s->cc_op);
5266 gen_op_das();
5267 s->cc_op = CC_OP_EFLAGS;
5268 break;
5269 case 0x37: /* aaa */
5270 if (CODE64(s))
5271 goto illegal_op;
5272 if (s->cc_op != CC_OP_DYNAMIC)
5273 gen_op_set_cc_op(s->cc_op);
5274 gen_op_aaa();
5275 s->cc_op = CC_OP_EFLAGS;
5276 break;
5277 case 0x3f: /* aas */
5278 if (CODE64(s))
5279 goto illegal_op;
5280 if (s->cc_op != CC_OP_DYNAMIC)
5281 gen_op_set_cc_op(s->cc_op);
5282 gen_op_aas();
5283 s->cc_op = CC_OP_EFLAGS;
5284 break;
5285 case 0xd4: /* aam */
5286 if (CODE64(s))
5287 goto illegal_op;
5288 val = ldub_code(s->pc++);
5289 gen_op_aam(val);
5290 s->cc_op = CC_OP_LOGICB;
5291 break;
5292 case 0xd5: /* aad */
5293 if (CODE64(s))
5294 goto illegal_op;
5295 val = ldub_code(s->pc++);
5296 gen_op_aad(val);
5297 s->cc_op = CC_OP_LOGICB;
5298 break;
5299 /************************/
5300 /* misc */
5301 case 0x90: /* nop */
5302 /* XXX: xchg + rex handling */
5303 /* XXX: correct lock test for all insn */
5304 if (prefixes & PREFIX_LOCK)
5305 goto illegal_op;
5306 break;
5307 case 0x9b: /* fwait */
5308 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5309 (HF_MP_MASK | HF_TS_MASK)) {
5310 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5311 } else {
5312 if (s->cc_op != CC_OP_DYNAMIC)
5313 gen_op_set_cc_op(s->cc_op);
5314 gen_jmp_im(pc_start - s->cs_base);
5315 gen_op_fwait();
5316 }
5317 break;
5318 case 0xcc: /* int3 */
5319 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5320 break;
5321 case 0xcd: /* int N */
5322 val = ldub_code(s->pc++);
5323 if (s->vm86 && s->iopl != 3) {
5324 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5325 } else {
5326 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5327 }
5328 break;
5329 case 0xce: /* into */
5330 if (CODE64(s))
5331 goto illegal_op;
5332 if (s->cc_op != CC_OP_DYNAMIC)
5333 gen_op_set_cc_op(s->cc_op);
5334 gen_jmp_im(pc_start - s->cs_base);
5335 gen_op_into(s->pc - pc_start);
5336 break;
5337 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5338 gen_debug(s, pc_start - s->cs_base);
5339 break;
5340 case 0xfa: /* cli */
5341 if (!s->vm86) {
5342 if (s->cpl <= s->iopl) {
5343 gen_op_cli();
5344 } else {
5345 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5346 }
5347 } else {
5348 if (s->iopl == 3) {
5349 gen_op_cli();
5350 } else {
5351 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5352 }
5353 }
5354 break;
5355 case 0xfb: /* sti */
5356 if (!s->vm86) {
5357 if (s->cpl <= s->iopl) {
5358 gen_sti:
5359 gen_op_sti();
5360 /* interruptions are enabled only the first insn after sti */
5361 /* If several instructions disable interrupts, only the
5362 _first_ does it */
5363 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5364 gen_op_set_inhibit_irq();
5365 /* give a chance to handle pending irqs */
5366 gen_jmp_im(s->pc - s->cs_base);
5367 gen_eob(s);
5368 } else {
5369 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5370 }
5371 } else {
5372 if (s->iopl == 3) {
5373 goto gen_sti;
5374 } else {
5375 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5376 }
5377 }
5378 break;
5379 case 0x62: /* bound */
5380 if (CODE64(s))
5381 goto illegal_op;
5382 ot = dflag ? OT_LONG : OT_WORD;
5383 modrm = ldub_code(s->pc++);
5384 reg = (modrm >> 3) & 7;
5385 mod = (modrm >> 6) & 3;
5386 if (mod == 3)
5387 goto illegal_op;
5388 gen_op_mov_TN_reg[ot][0][reg]();
5389 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5390 gen_jmp_im(pc_start - s->cs_base);
5391 if (ot == OT_WORD)
5392 gen_op_boundw();
5393 else
5394 gen_op_boundl();
5395 break;
5396 case 0x1c8 ... 0x1cf: /* bswap reg */
5397 reg = (b & 7) | REX_B(s);
5398#ifdef TARGET_X86_64
5399 if (dflag == 2) {
5400 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5401 gen_op_bswapq_T0();
5402 gen_op_mov_reg_T0[OT_QUAD][reg]();
5403 } else
5404#endif
5405 {
5406 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5407 gen_op_bswapl_T0();
5408 gen_op_mov_reg_T0[OT_LONG][reg]();
5409 }
5410 break;
5411 case 0xd6: /* salc */
5412 if (CODE64(s))
5413 goto illegal_op;
5414 if (s->cc_op != CC_OP_DYNAMIC)
5415 gen_op_set_cc_op(s->cc_op);
5416 gen_op_salc();
5417 break;
5418 case 0xe0: /* loopnz */
5419 case 0xe1: /* loopz */
5420 if (s->cc_op != CC_OP_DYNAMIC)
5421 gen_op_set_cc_op(s->cc_op);
5422 /* FALL THRU */
5423 case 0xe2: /* loop */
5424 case 0xe3: /* jecxz */
5425 {
5426 int l1, l2;
5427
5428 tval = (int8_t)insn_get(s, OT_BYTE);
5429 next_eip = s->pc - s->cs_base;
5430 tval += next_eip;
5431 if (s->dflag == 0)
5432 tval &= 0xffff;
5433
5434 l1 = gen_new_label();
5435 l2 = gen_new_label();
5436 b &= 3;
5437 if (b == 3) {
5438 gen_op_jz_ecx[s->aflag](l1);
5439 } else {
5440 gen_op_dec_ECX[s->aflag]();
5441 if (b <= 1)
5442 gen_op_mov_T0_cc();
5443 gen_op_loop[s->aflag][b](l1);
5444 }
5445
5446 gen_jmp_im(next_eip);
5447 gen_op_jmp_label(l2);
5448 gen_set_label(l1);
5449 gen_jmp_im(tval);
5450 gen_set_label(l2);
5451 gen_eob(s);
5452 }
5453 break;
5454 case 0x130: /* wrmsr */
5455 case 0x132: /* rdmsr */
5456 if (s->cpl != 0) {
5457 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5458 } else {
5459 if (b & 2)
5460 gen_op_rdmsr();
5461 else
5462 gen_op_wrmsr();
5463 }
5464 break;
5465 case 0x131: /* rdtsc */
5466 gen_jmp_im(pc_start - s->cs_base);
5467 gen_op_rdtsc();
5468 break;
5469 case 0x134: /* sysenter */
5470 if (CODE64(s))
5471 goto illegal_op;
5472 if (!s->pe) {
5473 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5474 } else {
5475 if (s->cc_op != CC_OP_DYNAMIC) {
5476 gen_op_set_cc_op(s->cc_op);
5477 s->cc_op = CC_OP_DYNAMIC;
5478 }
5479 gen_jmp_im(pc_start - s->cs_base);
5480 gen_op_sysenter();
5481 gen_eob(s);
5482 }
5483 break;
5484 case 0x135: /* sysexit */
5485 if (CODE64(s))
5486 goto illegal_op;
5487 if (!s->pe) {
5488 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5489 } else {
5490 if (s->cc_op != CC_OP_DYNAMIC) {
5491 gen_op_set_cc_op(s->cc_op);
5492 s->cc_op = CC_OP_DYNAMIC;
5493 }
5494 gen_jmp_im(pc_start - s->cs_base);
5495 gen_op_sysexit();
5496 gen_eob(s);
5497 }
5498 break;
5499#ifdef TARGET_X86_64
5500 case 0x105: /* syscall */
5501 /* XXX: is it usable in real mode ? */
5502 if (s->cc_op != CC_OP_DYNAMIC) {
5503 gen_op_set_cc_op(s->cc_op);
5504 s->cc_op = CC_OP_DYNAMIC;
5505 }
5506 gen_jmp_im(pc_start - s->cs_base);
5507 gen_op_syscall(s->pc - pc_start);
5508 gen_eob(s);
5509 break;
5510 case 0x107: /* sysret */
5511 if (!s->pe) {
5512 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5513 } else {
5514 if (s->cc_op != CC_OP_DYNAMIC) {
5515 gen_op_set_cc_op(s->cc_op);
5516 s->cc_op = CC_OP_DYNAMIC;
5517 }
5518 gen_jmp_im(pc_start - s->cs_base);
5519 gen_op_sysret(s->dflag);
5520 /* condition codes are modified only in long mode */
5521 if (s->lma)
5522 s->cc_op = CC_OP_EFLAGS;
5523 gen_eob(s);
5524 }
5525 break;
5526#endif
5527 case 0x1a2: /* cpuid */
5528 gen_op_cpuid();
5529 break;
5530 case 0xf4: /* hlt */
5531 if (s->cpl != 0) {
5532 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5533 } else {
5534 if (s->cc_op != CC_OP_DYNAMIC)
5535 gen_op_set_cc_op(s->cc_op);
5536 gen_jmp_im(s->pc - s->cs_base);
5537 gen_op_hlt();
5538 s->is_jmp = 3;
5539 }
5540 break;
5541 case 0x100:
5542 modrm = ldub_code(s->pc++);
5543 mod = (modrm >> 6) & 3;
5544 op = (modrm >> 3) & 7;
5545 switch(op) {
5546 case 0: /* sldt */
5547 if (!s->pe || s->vm86)
5548 goto illegal_op;
5549 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5550 ot = OT_WORD;
5551 if (mod == 3)
5552 ot += s->dflag;
5553 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5554 break;
5555 case 2: /* lldt */
5556 if (!s->pe || s->vm86)
5557 goto illegal_op;
5558 if (s->cpl != 0) {
5559 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5560 } else {
5561 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5562 gen_jmp_im(pc_start - s->cs_base);
5563 gen_op_lldt_T0();
5564 }
5565 break;
5566 case 1: /* str */
5567 if (!s->pe || s->vm86)
5568 goto illegal_op;
5569 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5570 ot = OT_WORD;
5571 if (mod == 3)
5572 ot += s->dflag;
5573 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5574 break;
5575 case 3: /* ltr */
5576 if (!s->pe || s->vm86)
5577 goto illegal_op;
5578 if (s->cpl != 0) {
5579 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5580 } else {
5581 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5582 gen_jmp_im(pc_start - s->cs_base);
5583 gen_op_ltr_T0();
5584 }
5585 break;
5586 case 4: /* verr */
5587 case 5: /* verw */
5588 if (!s->pe || s->vm86)
5589 goto illegal_op;
5590 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5591 if (s->cc_op != CC_OP_DYNAMIC)
5592 gen_op_set_cc_op(s->cc_op);
5593 if (op == 4)
5594 gen_op_verr();
5595 else
5596 gen_op_verw();
5597 s->cc_op = CC_OP_EFLAGS;
5598 break;
5599 default:
5600 goto illegal_op;
5601 }
5602 break;
5603 case 0x101:
5604 modrm = ldub_code(s->pc++);
5605 mod = (modrm >> 6) & 3;
5606 op = (modrm >> 3) & 7;
5607 rm = modrm & 7;
5608 switch(op) {
5609 case 0: /* sgdt */
5610 if (mod == 3)
5611 goto illegal_op;
5612 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5613 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5614 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5615 gen_add_A0_im(s, 2);
5616 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5617 if (!s->dflag)
5618 gen_op_andl_T0_im(0xffffff);
5619 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5620 break;
5621 case 1:
5622 if (mod == 3) {
5623 switch (rm) {
5624 case 0: /* monitor */
5625 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5626 s->cpl != 0)
5627 goto illegal_op;
5628 gen_jmp_im(pc_start - s->cs_base);
5629#ifdef TARGET_X86_64
5630 if (s->aflag == 2) {
5631 gen_op_movq_A0_reg[R_EBX]();
5632 gen_op_addq_A0_AL();
5633 } else
5634#endif
5635 {
5636 gen_op_movl_A0_reg[R_EBX]();
5637 gen_op_addl_A0_AL();
5638 if (s->aflag == 0)
5639 gen_op_andl_A0_ffff();
5640 }
5641 gen_add_A0_ds_seg(s);
5642 gen_op_monitor();
5643 break;
5644 case 1: /* mwait */
5645 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5646 s->cpl != 0)
5647 goto illegal_op;
5648 if (s->cc_op != CC_OP_DYNAMIC) {
5649 gen_op_set_cc_op(s->cc_op);
5650 s->cc_op = CC_OP_DYNAMIC;
5651 }
5652 gen_jmp_im(s->pc - s->cs_base);
5653 gen_op_mwait();
5654 gen_eob(s);
5655 break;
5656 default:
5657 goto illegal_op;
5658 }
5659 } else { /* sidt */
5660 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5661 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5662 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5663 gen_add_A0_im(s, 2);
5664 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5665 if (!s->dflag)
5666 gen_op_andl_T0_im(0xffffff);
5667 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5668 }
5669 break;
5670 case 2: /* lgdt */
5671 case 3: /* lidt */
5672 if (mod == 3)
5673 goto illegal_op;
5674 if (s->cpl != 0) {
5675 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5676 } else {
5677 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5678 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5679 gen_add_A0_im(s, 2);
5680 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5681 if (!s->dflag)
5682 gen_op_andl_T0_im(0xffffff);
5683 if (op == 2) {
5684 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5685 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5686 } else {
5687 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5688 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5689 }
5690 }
5691 break;
5692 case 4: /* smsw */
5693 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5694 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5695 break;
5696 case 6: /* lmsw */
5697 if (s->cpl != 0) {
5698 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5699 } else {
5700 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5701 gen_op_lmsw_T0();
5702 gen_jmp_im(s->pc - s->cs_base);
5703 gen_eob(s);
5704 }
5705 break;
5706 case 7: /* invlpg */
5707 if (s->cpl != 0) {
5708 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5709 } else {
5710 if (mod == 3) {
5711#ifdef TARGET_X86_64
5712 if (CODE64(s) && (modrm & 7) == 0) {
5713 /* swapgs */
5714 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5715 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5716 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5717 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5718 } else
5719#endif
5720 {
5721 goto illegal_op;
5722 }
5723 } else {
5724 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5725 gen_op_invlpg_A0();
5726 gen_jmp_im(s->pc - s->cs_base);
5727 gen_eob(s);
5728 }
5729 }
5730 break;
5731 default:
5732 goto illegal_op;
5733 }
5734 break;
5735 case 0x108: /* invd */
5736 case 0x109: /* wbinvd */
5737 if (s->cpl != 0) {
5738 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5739 } else {
5740 /* nothing to do */
5741 }
5742 break;
5743 case 0x63: /* arpl or movslS (x86_64) */
5744#ifdef TARGET_X86_64
5745 if (CODE64(s)) {
5746 int d_ot;
5747 /* d_ot is the size of destination */
5748 d_ot = dflag + OT_WORD;
5749
5750 modrm = ldub_code(s->pc++);
5751 reg = ((modrm >> 3) & 7) | rex_r;
5752 mod = (modrm >> 6) & 3;
5753 rm = (modrm & 7) | REX_B(s);
5754
5755 if (mod == 3) {
5756 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5757 /* sign extend */
5758 if (d_ot == OT_QUAD)
5759 gen_op_movslq_T0_T0();
5760 gen_op_mov_reg_T0[d_ot][reg]();
5761 } else {
5762 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5763 if (d_ot == OT_QUAD) {
5764 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5765 } else {
5766 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5767 }
5768 gen_op_mov_reg_T0[d_ot][reg]();
5769 }
5770 } else
5771#endif
5772 {
5773 if (!s->pe || s->vm86)
5774 goto illegal_op;
5775 ot = dflag ? OT_LONG : OT_WORD;
5776 modrm = ldub_code(s->pc++);
5777 reg = (modrm >> 3) & 7;
5778 mod = (modrm >> 6) & 3;
5779 rm = modrm & 7;
5780 if (mod != 3) {
5781 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5782 gen_op_ld_T0_A0[ot + s->mem_index]();
5783 } else {
5784 gen_op_mov_TN_reg[ot][0][rm]();
5785 }
5786 if (s->cc_op != CC_OP_DYNAMIC)
5787 gen_op_set_cc_op(s->cc_op);
5788 gen_op_arpl();
5789 s->cc_op = CC_OP_EFLAGS;
5790 if (mod != 3) {
5791 gen_op_st_T0_A0[ot + s->mem_index]();
5792 } else {
5793 gen_op_mov_reg_T0[ot][rm]();
5794 }
5795 gen_op_arpl_update();
5796 }
5797 break;
5798 case 0x102: /* lar */
5799 case 0x103: /* lsl */
5800 if (!s->pe || s->vm86)
5801 goto illegal_op;
5802 ot = dflag ? OT_LONG : OT_WORD;
5803 modrm = ldub_code(s->pc++);
5804 reg = ((modrm >> 3) & 7) | rex_r;
5805 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5806 gen_op_mov_TN_reg[ot][1][reg]();
5807 if (s->cc_op != CC_OP_DYNAMIC)
5808 gen_op_set_cc_op(s->cc_op);
5809 if (b == 0x102)
5810 gen_op_lar();
5811 else
5812 gen_op_lsl();
5813 s->cc_op = CC_OP_EFLAGS;
5814 gen_op_mov_reg_T1[ot][reg]();
5815 break;
5816 case 0x118:
5817 modrm = ldub_code(s->pc++);
5818 mod = (modrm >> 6) & 3;
5819 op = (modrm >> 3) & 7;
5820 switch(op) {
5821 case 0: /* prefetchnta */
5822 case 1: /* prefetchnt0 */
5823 case 2: /* prefetchnt0 */
5824 case 3: /* prefetchnt0 */
5825 if (mod == 3)
5826 goto illegal_op;
5827 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5828 /* nothing more to do */
5829 break;
5830 default:
5831 goto illegal_op;
5832 }
5833 break;
5834 case 0x120: /* mov reg, crN */
5835 case 0x122: /* mov crN, reg */
5836 if (s->cpl != 0) {
5837 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5838 } else {
5839 modrm = ldub_code(s->pc++);
5840 if ((modrm & 0xc0) != 0xc0)
5841 goto illegal_op;
5842 rm = (modrm & 7) | REX_B(s);
5843 reg = ((modrm >> 3) & 7) | rex_r;
5844 if (CODE64(s))
5845 ot = OT_QUAD;
5846 else
5847 ot = OT_LONG;
5848 switch(reg) {
5849 case 0:
5850 case 2:
5851 case 3:
5852 case 4:
5853 case 8:
5854 if (b & 2) {
5855 gen_op_mov_TN_reg[ot][0][rm]();
5856 gen_op_movl_crN_T0(reg);
5857 gen_jmp_im(s->pc - s->cs_base);
5858 gen_eob(s);
5859 } else {
5860#if !defined(CONFIG_USER_ONLY)
5861 if (reg == 8)
5862 gen_op_movtl_T0_cr8();
5863 else
5864#endif
5865 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5866 gen_op_mov_reg_T0[ot][rm]();
5867 }
5868 break;
5869 default:
5870 goto illegal_op;
5871 }
5872 }
5873 break;
5874 case 0x121: /* mov reg, drN */
5875 case 0x123: /* mov drN, reg */
5876 if (s->cpl != 0) {
5877 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5878 } else {
5879 modrm = ldub_code(s->pc++);
5880 if ((modrm & 0xc0) != 0xc0)
5881 goto illegal_op;
5882 rm = (modrm & 7) | REX_B(s);
5883 reg = ((modrm >> 3) & 7) | rex_r;
5884 if (CODE64(s))
5885 ot = OT_QUAD;
5886 else
5887 ot = OT_LONG;
5888 /* XXX: do it dynamically with CR4.DE bit */
5889 if (reg == 4 || reg == 5 || reg >= 8)
5890 goto illegal_op;
5891 if (b & 2) {
5892 gen_op_mov_TN_reg[ot][0][rm]();
5893 gen_op_movl_drN_T0(reg);
5894 gen_jmp_im(s->pc - s->cs_base);
5895 gen_eob(s);
5896 } else {
5897 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5898 gen_op_mov_reg_T0[ot][rm]();
5899 }
5900 }
5901 break;
5902 case 0x106: /* clts */
5903 if (s->cpl != 0) {
5904 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5905 } else {
5906 gen_op_clts();
5907 /* abort block because static cpu state changed */
5908 gen_jmp_im(s->pc - s->cs_base);
5909 gen_eob(s);
5910 }
5911 break;
5912 /* MMX/SSE/SSE2/PNI support */
5913 case 0x1c3: /* MOVNTI reg, mem */
5914 if (!(s->cpuid_features & CPUID_SSE2))
5915 goto illegal_op;
5916 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5917 modrm = ldub_code(s->pc++);
5918 mod = (modrm >> 6) & 3;
5919 if (mod == 3)
5920 goto illegal_op;
5921 reg = ((modrm >> 3) & 7) | rex_r;
5922 /* generate a generic store */
5923 gen_ldst_modrm(s, modrm, ot, reg, 1);
5924 break;
5925 case 0x1ae:
5926 modrm = ldub_code(s->pc++);
5927 mod = (modrm >> 6) & 3;
5928 op = (modrm >> 3) & 7;
5929 switch(op) {
5930 case 0: /* fxsave */
5931 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5932 (s->flags & HF_EM_MASK))
5933 goto illegal_op;
5934 if (s->flags & HF_TS_MASK) {
5935 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5936 break;
5937 }
5938 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5939 gen_op_fxsave_A0((s->dflag == 2));
5940 break;
5941 case 1: /* fxrstor */
5942 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5943 (s->flags & HF_EM_MASK))
5944 goto illegal_op;
5945 if (s->flags & HF_TS_MASK) {
5946 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5947 break;
5948 }
5949 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5950 gen_op_fxrstor_A0((s->dflag == 2));
5951 break;
5952 case 2: /* ldmxcsr */
5953 case 3: /* stmxcsr */
5954 if (s->flags & HF_TS_MASK) {
5955 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5956 break;
5957 }
5958 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
5959 mod == 3)
5960 goto illegal_op;
5961 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5962 if (op == 2) {
5963 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5964 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
5965 } else {
5966 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
5967 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
5968 }
5969 break;
5970 case 5: /* lfence */
5971 case 6: /* mfence */
5972 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
5973 goto illegal_op;
5974 break;
5975 case 7: /* sfence / clflush */
5976 if ((modrm & 0xc7) == 0xc0) {
5977 /* sfence */
5978 if (!(s->cpuid_features & CPUID_SSE))
5979 goto illegal_op;
5980 } else {
5981 /* clflush */
5982 if (!(s->cpuid_features & CPUID_CLFLUSH))
5983 goto illegal_op;
5984 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5985 }
5986 break;
5987 default:
5988 goto illegal_op;
5989 }
5990 break;
5991 case 0x10d: /* prefetch */
5992 modrm = ldub_code(s->pc++);
5993 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5994 /* ignore for now */
5995 break;
5996 case 0x110 ... 0x117:
5997 case 0x128 ... 0x12f:
5998 case 0x150 ... 0x177:
5999 case 0x17c ... 0x17f:
6000 case 0x1c2:
6001 case 0x1c4 ... 0x1c6:
6002 case 0x1d0 ... 0x1fe:
6003 gen_sse(s, b, pc_start, rex_r);
6004 break;
6005 default:
6006 goto illegal_op;
6007 }
6008 /* lock generation */
6009 if (s->prefix & PREFIX_LOCK)
6010 gen_op_unlock();
6011 return s->pc;
6012 illegal_op:
6013 if (s->prefix & PREFIX_LOCK)
6014 gen_op_unlock();
6015 /* XXX: ensure that no lock was generated */
6016 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6017 return s->pc;
6018}
6019
/* Convenience masks over the individual CC_* EFLAGS status bits:
 * CC_OSZAPC covers all six arithmetic status flags (O/S/Z/A/P/C);
 * CC_OSZAP is the same set minus carry. Used below to describe which
 * flags each generated micro-op reads or writes. */
6020#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6021#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6022
/* Flags read by an operation.
 *
 * Table indexed by micro-op number (INDEX_op_*); each entry is a CC_* bit
 * mask naming the EFLAGS status bits that micro-op consumes. Entries not
 * listed default to 0 (the op reads no flags). NOTE(review): presumably
 * consumed by a flag-liveness/optimisation pass elsewhere in this file —
 * the consumer is outside this chunk, confirm before relying on it. */
6024static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops read the auxiliary (and for das/daa the carry) flag. */
6025 [INDEX_op_aas] = CC_A,
6026 [INDEX_op_aaa] = CC_A,
6027 [INDEX_op_das] = CC_A | CC_C,
6028 [INDEX_op_daa] = CC_A | CC_C,
6029
6030 /* subtle: due to the incl/decl implementation, C is used */
6031 [INDEX_op_update_inc_cc] = CC_C,
6032
6033 [INDEX_op_into] = CC_O,
6034
    /* Conditional-jump ops after a sub: each reads exactly the flags its
     * x86 condition tests (b: C; z: Z; be: Z|C; s: S; l: O^S; le: O^S|Z). */
6035 [INDEX_op_jb_subb] = CC_C,
6036 [INDEX_op_jb_subw] = CC_C,
6037 [INDEX_op_jb_subl] = CC_C,
6038
6039 [INDEX_op_jz_subb] = CC_Z,
6040 [INDEX_op_jz_subw] = CC_Z,
6041 [INDEX_op_jz_subl] = CC_Z,
6042
6043 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6044 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6045 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6046
6047 [INDEX_op_js_subb] = CC_S,
6048 [INDEX_op_js_subw] = CC_S,
6049 [INDEX_op_js_subl] = CC_S,
6050
6051 [INDEX_op_jl_subb] = CC_O | CC_S,
6052 [INDEX_op_jl_subw] = CC_O | CC_S,
6053 [INDEX_op_jl_subl] = CC_O | CC_S,
6054
6055 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6056 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6057 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6058
    /* loopz/loopnz additionally test ZF besides ECX. */
6059 [INDEX_op_loopnzw] = CC_Z,
6060 [INDEX_op_loopnzl] = CC_Z,
6061 [INDEX_op_loopzw] = CC_Z,
6062 [INDEX_op_loopzl] = CC_Z,
6063
    /* setcc ops computed from the live cc state. */
6064 [INDEX_op_seto_T0_cc] = CC_O,
6065 [INDEX_op_setb_T0_cc] = CC_C,
6066 [INDEX_op_setz_T0_cc] = CC_Z,
6067 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6068 [INDEX_op_sets_T0_cc] = CC_S,
6069 [INDEX_op_setp_T0_cc] = CC_P,
6070 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6071 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6072
    /* setcc ops specialised for a preceding sub, per operand width. */
6073 [INDEX_op_setb_T0_subb] = CC_C,
6074 [INDEX_op_setb_T0_subw] = CC_C,
6075 [INDEX_op_setb_T0_subl] = CC_C,
6076
6077 [INDEX_op_setz_T0_subb] = CC_Z,
6078 [INDEX_op_setz_T0_subw] = CC_Z,
6079 [INDEX_op_setz_T0_subl] = CC_Z,
6080
6081 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6082 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6083 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6084
6085 [INDEX_op_sets_T0_subb] = CC_S,
6086 [INDEX_op_sets_T0_subw] = CC_S,
6087 [INDEX_op_sets_T0_subl] = CC_S,
6088
6089 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6090 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6091 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6092
6093 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6094 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6095 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6096
    /* pushf/lahf-style read of all six status flags; cmc/salc read carry. */
6097 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6098 [INDEX_op_cmc] = CC_C,
6099 [INDEX_op_salc] = CC_C,
6100
6101 /* needed for correct flag optimisation before string ops */
6102 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6103 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6104 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6105 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6106
6107#ifdef TARGET_X86_64
    /* 64-bit (quad) counterparts of the entries above. */
6108 [INDEX_op_jb_subq] = CC_C,
6109 [INDEX_op_jz_subq] = CC_Z,
6110 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6111 [INDEX_op_js_subq] = CC_S,
6112 [INDEX_op_jl_subq] = CC_O | CC_S,
6113 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6114
6115 [INDEX_op_loopnzq] = CC_Z,
6116 [INDEX_op_loopzq] = CC_Z,
6117
6118 [INDEX_op_setb_T0_subq] = CC_C,
6119 [INDEX_op_setz_T0_subq] = CC_Z,
6120 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6121 [INDEX_op_sets_T0_subq] = CC_S,
6122 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6123 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6124
6125 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6126 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6127#endif
6128
/* DEF_READF generates the entries for the ops that consume the incoming
 * carry: adc/sbb (carry-in to the arithmetic) and rcl/rcr (carry rotates
 * through the operand). SUFFIX selects the memory-access flavour of the
 * micro-op name; no comments may be placed inside the backslash-continued
 * body below or the macro would be cut short. */
6129#define DEF_READF(SUFFIX)\
6130 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6131 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6132 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6133 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6134 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6135 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6136 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6137 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6138\
6139 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6140 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6141 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6142 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6143 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6144 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6145 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6146 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6147
/* Instantiate for each access variant: register/plain, raw memory, and —
 * for the system (non user-only) build — kernel and user address-space
 * accessors. */
6148 DEF_READF( )
6149 DEF_READF(_raw)
6150#ifndef CONFIG_USER_ONLY
6151 DEF_READF(_kernel)
6152 DEF_READF(_user)
6153#endif
6154};
6155
/* flags written by an operation */
/* Maps each micro-op index to the set of EFLAGS status bits (CC_*) the
   op may overwrite.  optimize_flags() uses this table during its
   backward liveness scan: an op whose written flags are all dead may be
   replaced by its flag-less variant from opc_simpler[].  Entries not
   listed default to 0 (writes no flags). */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* byte-wide eflags load (SAHF-style) cannot touch O, which lives
       outside the low 8 flag bits */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Stamp out the write-flag entries for each memory-access variant of the
   read-modify-write ops; SUFFIX selects the op family (bare, _raw,
   _kernel, _user — see the DEF_WRITEF() invocations below).
   NOTE: no comments inside the macro body — the line continuations must
   stay unbroken. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    /* one expansion per memory-access variant; _kernel/_user only exist
       with the softmmu */
    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6306
/* simpler form of an operation if no flags need to be generated */
/* Maps a flag-producing micro-op to its flag-less twin.  Entries left
   at zero are turned into the identity mapping by
   optimize_flags_init(), so a lookup always yields a valid op. */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops degenerate to a nop when the flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* Stamp out the rol/ror mappings for each memory-access variant.
   NOTE: no comments inside the macro body — the line continuations must
   stay unbroken. */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6350
6351void optimize_flags_init(void)
6352{
6353 int i;
6354 /* put default values in arrays */
6355 for(i = 0; i < NB_OPS; i++) {
6356 if (opc_simpler[i] == 0)
6357 opc_simpler[i] = i;
6358 }
6359}
6360
6361/* CPU flags computation optimization: we move backward thru the
6362 generated code to see which flags are needed. The operation is
6363 modified if suitable */
6364static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6365{
6366 uint16_t *opc_ptr;
6367 int live_flags, write_flags, op;
6368
6369 opc_ptr = opc_buf + opc_buf_len;
6370 /* live_flags contains the flags needed by the next instructions
6371 in the code. At the end of the bloc, we consider that all the
6372 flags are live. */
6373 live_flags = CC_OSZAPC;
6374 while (opc_ptr > opc_buf) {
6375 op = *--opc_ptr;
6376 /* if none of the flags written by the instruction is used,
6377 then we can try to find a simpler instruction */
6378 write_flags = opc_write_flags[op];
6379 if ((live_flags & write_flags) == 0) {
6380 *opc_ptr = opc_simpler[op];
6381 }
6382 /* compute the live flags before the instruction */
6383 live_flags &= ~write_flags;
6384 live_flags |= opc_read_flags[op];
6385 }
6386}
6387
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
/* Returns 0 unconditionally.  The DisasContext is filled from tb->flags
   (CPU mode bits), then instructions are disassembled one by one until
   a block-ending condition is met: an explicit jump (dc->is_jmp set by
   disas_insn), single-step / IRQ-inhibit / CF_SINGLE_INSN, a VBox
   single-instruction request, or buffer/page-size exhaustion. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the packed hflags/eflags snapshot into discrete mode bits */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* flags state unknown at block entry: force dynamic computation */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* presumably indexes the _raw/_kernel/_user op variants (stride 4)
       — cf. the DEF_WRITEF suffixes above; TODO confirm against the op
       table layout */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct-jump chaining is only safe when nothing forces a per-insn
       stop (trap flag, debugger single-step, pending IRQ inhibition) */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the global micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        /* emit a debug trap if a breakpoint sits on this guest PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for every micro-op slot emitted
               so far; zero-fill slots belonging to the previous insn */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VBox asked for exactly one instruction: consume the one-shot
           request and close the block after this insn */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* in search_pc mode the caller only wants the PC tables, so tb->size
       is left untouched */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6569
6570int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6571{
6572 return gen_intermediate_code_internal(env, tb, 0);
6573}
6574
6575int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6576{
6577 return gen_intermediate_code_internal(env, tb, 1);
6578}
6579
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette