VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 9761

最後變更（自修訂 9761 以來）是修訂 9486，由 vboxsync 於 17 年前提交

Synced 64 bits lahf/sahf from QEmu.

  • 屬性 svn:eol-style 設為 native
檔案大小: 205.5 KB
 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#ifndef VBOX
26#include <signal.h>
27#include <assert.h>
28#endif /* !VBOX */
29
30#include "cpu.h"
31#include "exec-all.h"
32#include "disas.h"
33
34/* XXX: move that elsewhere */
35static uint16_t *gen_opc_ptr;
36static uint32_t *gen_opparam_ptr;
37
38#define PREFIX_REPZ 0x01
39#define PREFIX_REPNZ 0x02
40#define PREFIX_LOCK 0x04
41#define PREFIX_DATA 0x08
42#define PREFIX_ADR 0x10
43
44#ifdef TARGET_X86_64
45#define X86_64_ONLY(x) x
46#define X86_64_DEF(x...) x
47#define CODE64(s) ((s)->code64)
48#define REX_X(s) ((s)->rex_x)
49#define REX_B(s) ((s)->rex_b)
50/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
51#if 1
52#define BUGGY_64(x) NULL
53#endif
54#else
55#define X86_64_ONLY(x) NULL
56#define X86_64_DEF(x...)
57#define CODE64(s) 0
58#define REX_X(s) 0
59#define REX_B(s) 0
60#endif
61
62#ifdef TARGET_X86_64
63static int x86_64_hregs;
64#endif
65
66#ifdef USE_DIRECT_JUMP
67#define TBPARAM(x)
68#else
69#define TBPARAM(x) (long)(x)
70#endif
71
72#ifdef VBOX
73/* Special/override code readers to hide patched code. */
74
75uint8_t ldub_code_raw(target_ulong pc)
76{
77 uint8_t b;
78
79 if (!remR3GetOpcode(cpu_single_env, pc, &b))
80 b = ldub_code(pc);
81 return b;
82}
83#define ldub_code(a) ldub_code_raw(a)
84
85uint16_t lduw_code_raw(target_ulong pc)
86{
87 return (ldub_code(pc+1) << 8) | ldub_code(pc);
88}
89#define lduw_code(a) lduw_code_raw(a)
90
91
92uint32_t ldl_code_raw(target_ulong pc)
93{
94 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
95}
96#define ldl_code(a) ldl_code_raw(a)
97
98#endif /* VBOX */
99
100
101typedef struct DisasContext {
102 /* current insn context */
103 int override; /* -1 if no override */
104 int prefix;
105 int aflag, dflag;
106 target_ulong pc; /* pc = eip + cs_base */
107 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
108 static state change (stop translation) */
109 /* current block context */
110 target_ulong cs_base; /* base of CS segment */
111 int pe; /* protected mode */
112 int code32; /* 32 bit code segment */
113#ifdef TARGET_X86_64
114 int lma; /* long mode active */
115 int code64; /* 64 bit code segment */
116 int rex_x, rex_b;
117#endif
118 int ss32; /* 32 bit stack segment */
119 int cc_op; /* current CC operation */
120 int addseg; /* non zero if either DS/ES/SS have a non zero base */
121 int f_st; /* currently unused */
122 int vm86; /* vm86 mode */
123#ifdef VBOX
124 int vme; /* CR4.VME */
125 int record_call; /* record calls for CSAM or not? */
126#endif
127 int cpl;
128 int iopl;
129 int tf; /* TF cpu flag */
130 int singlestep_enabled; /* "hardware" single step enabled */
131 int jmp_opt; /* use direct block chaining for direct jumps */
132 int mem_index; /* select memory access functions */
133 int flags; /* all execution flags */
134 struct TranslationBlock *tb;
135 int popl_esp_hack; /* for correct popl with esp base handling */
136 int rip_offset; /* only used in x86_64, but left for simplicity */
137 int cpuid_features;
138 int cpuid_ext_features;
139 int cpuid_ext2_features;
140 int cpuid_ext3_features;
141} DisasContext;
142
143static void gen_eob(DisasContext *s);
144static void gen_jmp(DisasContext *s, target_ulong eip);
145static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
146
147/* i386 arith/logic operations */
148enum {
149 OP_ADDL,
150 OP_ORL,
151 OP_ADCL,
152 OP_SBBL,
153 OP_ANDL,
154 OP_SUBL,
155 OP_XORL,
156 OP_CMPL,
157};
158
159/* i386 shift ops */
160enum {
161 OP_ROL,
162 OP_ROR,
163 OP_RCL,
164 OP_RCR,
165 OP_SHL,
166 OP_SHR,
167 OP_SHL1, /* undocumented */
168 OP_SAR = 7,
169};
170
171enum {
172#define DEF(s, n, copy_size) INDEX_op_ ## s,
173#include "opc.h"
174#undef DEF
175 NB_OPS,
176};
177
178#include "gen-op.h"
179
180/* operand size */
181enum {
182 OT_BYTE = 0,
183 OT_WORD,
184 OT_LONG,
185 OT_QUAD,
186};
187
188enum {
189 /* I386 int registers */
190 OR_EAX, /* MUST be even numbered */
191 OR_ECX,
192 OR_EDX,
193 OR_EBX,
194 OR_ESP,
195 OR_EBP,
196 OR_ESI,
197 OR_EDI,
198
199 OR_TMP0 = 16, /* temporary operand register */
200 OR_TMP1,
201 OR_A0, /* temporary register used when doing address evaluation */
202};
203
204#ifdef TARGET_X86_64
205
206#define NB_OP_SIZES 4
207
208#define DEF_REGS(prefix, suffix) \
209 prefix ## EAX ## suffix,\
210 prefix ## ECX ## suffix,\
211 prefix ## EDX ## suffix,\
212 prefix ## EBX ## suffix,\
213 prefix ## ESP ## suffix,\
214 prefix ## EBP ## suffix,\
215 prefix ## ESI ## suffix,\
216 prefix ## EDI ## suffix,\
217 prefix ## R8 ## suffix,\
218 prefix ## R9 ## suffix,\
219 prefix ## R10 ## suffix,\
220 prefix ## R11 ## suffix,\
221 prefix ## R12 ## suffix,\
222 prefix ## R13 ## suffix,\
223 prefix ## R14 ## suffix,\
224 prefix ## R15 ## suffix,
225
226#define DEF_BREGS(prefixb, prefixh, suffix) \
227 \
228static void prefixb ## ESP ## suffix ## _wrapper(void) \
229{ \
230 if (x86_64_hregs) \
231 prefixb ## ESP ## suffix (); \
232 else \
233 prefixh ## EAX ## suffix (); \
234} \
235 \
236static void prefixb ## EBP ## suffix ## _wrapper(void) \
237{ \
238 if (x86_64_hregs) \
239 prefixb ## EBP ## suffix (); \
240 else \
241 prefixh ## ECX ## suffix (); \
242} \
243 \
244static void prefixb ## ESI ## suffix ## _wrapper(void) \
245{ \
246 if (x86_64_hregs) \
247 prefixb ## ESI ## suffix (); \
248 else \
249 prefixh ## EDX ## suffix (); \
250} \
251 \
252static void prefixb ## EDI ## suffix ## _wrapper(void) \
253{ \
254 if (x86_64_hregs) \
255 prefixb ## EDI ## suffix (); \
256 else \
257 prefixh ## EBX ## suffix (); \
258}
259
260DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
261DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
262DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
263DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
264
265#else /* !TARGET_X86_64 */
266
267#define NB_OP_SIZES 3
268
269#define DEF_REGS(prefix, suffix) \
270 prefix ## EAX ## suffix,\
271 prefix ## ECX ## suffix,\
272 prefix ## EDX ## suffix,\
273 prefix ## EBX ## suffix,\
274 prefix ## ESP ## suffix,\
275 prefix ## EBP ## suffix,\
276 prefix ## ESI ## suffix,\
277 prefix ## EDI ## suffix,
278
279#endif /* !TARGET_X86_64 */
280
281static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
282 [OT_BYTE] = {
283 gen_op_movb_EAX_T0,
284 gen_op_movb_ECX_T0,
285 gen_op_movb_EDX_T0,
286 gen_op_movb_EBX_T0,
287#ifdef TARGET_X86_64
288 gen_op_movb_ESP_T0_wrapper,
289 gen_op_movb_EBP_T0_wrapper,
290 gen_op_movb_ESI_T0_wrapper,
291 gen_op_movb_EDI_T0_wrapper,
292 gen_op_movb_R8_T0,
293 gen_op_movb_R9_T0,
294 gen_op_movb_R10_T0,
295 gen_op_movb_R11_T0,
296 gen_op_movb_R12_T0,
297 gen_op_movb_R13_T0,
298 gen_op_movb_R14_T0,
299 gen_op_movb_R15_T0,
300#else
301 gen_op_movh_EAX_T0,
302 gen_op_movh_ECX_T0,
303 gen_op_movh_EDX_T0,
304 gen_op_movh_EBX_T0,
305#endif
306 },
307 [OT_WORD] = {
308 DEF_REGS(gen_op_movw_, _T0)
309 },
310 [OT_LONG] = {
311 DEF_REGS(gen_op_movl_, _T0)
312 },
313#ifdef TARGET_X86_64
314 [OT_QUAD] = {
315 DEF_REGS(gen_op_movq_, _T0)
316 },
317#endif
318};
319
320static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
321 [OT_BYTE] = {
322 gen_op_movb_EAX_T1,
323 gen_op_movb_ECX_T1,
324 gen_op_movb_EDX_T1,
325 gen_op_movb_EBX_T1,
326#ifdef TARGET_X86_64
327 gen_op_movb_ESP_T1_wrapper,
328 gen_op_movb_EBP_T1_wrapper,
329 gen_op_movb_ESI_T1_wrapper,
330 gen_op_movb_EDI_T1_wrapper,
331 gen_op_movb_R8_T1,
332 gen_op_movb_R9_T1,
333 gen_op_movb_R10_T1,
334 gen_op_movb_R11_T1,
335 gen_op_movb_R12_T1,
336 gen_op_movb_R13_T1,
337 gen_op_movb_R14_T1,
338 gen_op_movb_R15_T1,
339#else
340 gen_op_movh_EAX_T1,
341 gen_op_movh_ECX_T1,
342 gen_op_movh_EDX_T1,
343 gen_op_movh_EBX_T1,
344#endif
345 },
346 [OT_WORD] = {
347 DEF_REGS(gen_op_movw_, _T1)
348 },
349 [OT_LONG] = {
350 DEF_REGS(gen_op_movl_, _T1)
351 },
352#ifdef TARGET_X86_64
353 [OT_QUAD] = {
354 DEF_REGS(gen_op_movq_, _T1)
355 },
356#endif
357};
358
359static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
360 [0] = {
361 DEF_REGS(gen_op_movw_, _A0)
362 },
363 [1] = {
364 DEF_REGS(gen_op_movl_, _A0)
365 },
366#ifdef TARGET_X86_64
367 [2] = {
368 DEF_REGS(gen_op_movq_, _A0)
369 },
370#endif
371};
372
373static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
374{
375 [OT_BYTE] = {
376 {
377 gen_op_movl_T0_EAX,
378 gen_op_movl_T0_ECX,
379 gen_op_movl_T0_EDX,
380 gen_op_movl_T0_EBX,
381#ifdef TARGET_X86_64
382 gen_op_movl_T0_ESP_wrapper,
383 gen_op_movl_T0_EBP_wrapper,
384 gen_op_movl_T0_ESI_wrapper,
385 gen_op_movl_T0_EDI_wrapper,
386 gen_op_movl_T0_R8,
387 gen_op_movl_T0_R9,
388 gen_op_movl_T0_R10,
389 gen_op_movl_T0_R11,
390 gen_op_movl_T0_R12,
391 gen_op_movl_T0_R13,
392 gen_op_movl_T0_R14,
393 gen_op_movl_T0_R15,
394#else
395 gen_op_movh_T0_EAX,
396 gen_op_movh_T0_ECX,
397 gen_op_movh_T0_EDX,
398 gen_op_movh_T0_EBX,
399#endif
400 },
401 {
402 gen_op_movl_T1_EAX,
403 gen_op_movl_T1_ECX,
404 gen_op_movl_T1_EDX,
405 gen_op_movl_T1_EBX,
406#ifdef TARGET_X86_64
407 gen_op_movl_T1_ESP_wrapper,
408 gen_op_movl_T1_EBP_wrapper,
409 gen_op_movl_T1_ESI_wrapper,
410 gen_op_movl_T1_EDI_wrapper,
411 gen_op_movl_T1_R8,
412 gen_op_movl_T1_R9,
413 gen_op_movl_T1_R10,
414 gen_op_movl_T1_R11,
415 gen_op_movl_T1_R12,
416 gen_op_movl_T1_R13,
417 gen_op_movl_T1_R14,
418 gen_op_movl_T1_R15,
419#else
420 gen_op_movh_T1_EAX,
421 gen_op_movh_T1_ECX,
422 gen_op_movh_T1_EDX,
423 gen_op_movh_T1_EBX,
424#endif
425 },
426 },
427 [OT_WORD] = {
428 {
429 DEF_REGS(gen_op_movl_T0_, )
430 },
431 {
432 DEF_REGS(gen_op_movl_T1_, )
433 },
434 },
435 [OT_LONG] = {
436 {
437 DEF_REGS(gen_op_movl_T0_, )
438 },
439 {
440 DEF_REGS(gen_op_movl_T1_, )
441 },
442 },
443#ifdef TARGET_X86_64
444 [OT_QUAD] = {
445 {
446 DEF_REGS(gen_op_movl_T0_, )
447 },
448 {
449 DEF_REGS(gen_op_movl_T1_, )
450 },
451 },
452#endif
453};
454
455static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
456 DEF_REGS(gen_op_movl_A0_, )
457};
458
459static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
460 [0] = {
461 DEF_REGS(gen_op_addl_A0_, )
462 },
463 [1] = {
464 DEF_REGS(gen_op_addl_A0_, _s1)
465 },
466 [2] = {
467 DEF_REGS(gen_op_addl_A0_, _s2)
468 },
469 [3] = {
470 DEF_REGS(gen_op_addl_A0_, _s3)
471 },
472};
473
474#ifdef TARGET_X86_64
475static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
476 DEF_REGS(gen_op_movq_A0_, )
477};
478
479static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
480 [0] = {
481 DEF_REGS(gen_op_addq_A0_, )
482 },
483 [1] = {
484 DEF_REGS(gen_op_addq_A0_, _s1)
485 },
486 [2] = {
487 DEF_REGS(gen_op_addq_A0_, _s2)
488 },
489 [3] = {
490 DEF_REGS(gen_op_addq_A0_, _s3)
491 },
492};
493#endif
494
495static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
496 [0] = {
497 DEF_REGS(gen_op_cmovw_, _T1_T0)
498 },
499 [1] = {
500 DEF_REGS(gen_op_cmovl_, _T1_T0)
501 },
502#ifdef TARGET_X86_64
503 [2] = {
504 DEF_REGS(gen_op_cmovq_, _T1_T0)
505 },
506#endif
507};
508
509static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
510 NULL,
511 gen_op_orl_T0_T1,
512 NULL,
513 NULL,
514 gen_op_andl_T0_T1,
515 NULL,
516 gen_op_xorl_T0_T1,
517 NULL,
518};
519
520#define DEF_ARITHC(SUFFIX)\
521 {\
522 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
523 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
524 },\
525 {\
526 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
527 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
528 },\
529 {\
530 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
531 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
532 },\
533 {\
534 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
535 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
536 },
537
538static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
539 DEF_ARITHC( )
540};
541
542static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
543 DEF_ARITHC(_raw)
544#ifndef CONFIG_USER_ONLY
545 DEF_ARITHC(_kernel)
546 DEF_ARITHC(_user)
547#endif
548};
549
550static const int cc_op_arithb[8] = {
551 CC_OP_ADDB,
552 CC_OP_LOGICB,
553 CC_OP_ADDB,
554 CC_OP_SUBB,
555 CC_OP_LOGICB,
556 CC_OP_SUBB,
557 CC_OP_LOGICB,
558 CC_OP_SUBB,
559};
560
561#define DEF_CMPXCHG(SUFFIX)\
562 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
563 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
564 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
565 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
566
567static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
568 DEF_CMPXCHG( )
569};
570
571static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
572 DEF_CMPXCHG(_raw)
573#ifndef CONFIG_USER_ONLY
574 DEF_CMPXCHG(_kernel)
575 DEF_CMPXCHG(_user)
576#endif
577};
578
579#define DEF_SHIFT(SUFFIX)\
580 {\
581 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
582 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
583 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
584 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
585 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
586 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
587 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
588 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
589 },\
590 {\
591 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
592 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
593 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
594 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
595 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
596 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
597 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
598 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
599 },\
600 {\
601 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
602 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
603 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
604 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
605 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
606 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
607 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
608 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
609 },\
610 {\
611 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
612 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
613 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
614 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
615 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
616 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
617 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
618 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
619 },
620
621static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
622 DEF_SHIFT( )
623};
624
625static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
626 DEF_SHIFT(_raw)
627#ifndef CONFIG_USER_ONLY
628 DEF_SHIFT(_kernel)
629 DEF_SHIFT(_user)
630#endif
631};
632
633#define DEF_SHIFTD(SUFFIX, op)\
634 {\
635 NULL,\
636 NULL,\
637 },\
638 {\
639 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
640 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
641 },\
642 {\
643 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
644 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
645 },\
646 {\
647X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
648 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
649 },
650
651static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
652 DEF_SHIFTD(, im)
653};
654
655static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
656 DEF_SHIFTD(, ECX)
657};
658
659static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
660 DEF_SHIFTD(_raw, im)
661#ifndef CONFIG_USER_ONLY
662 DEF_SHIFTD(_kernel, im)
663 DEF_SHIFTD(_user, im)
664#endif
665};
666
667static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
668 DEF_SHIFTD(_raw, ECX)
669#ifndef CONFIG_USER_ONLY
670 DEF_SHIFTD(_kernel, ECX)
671 DEF_SHIFTD(_user, ECX)
672#endif
673};
674
675static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
676 [0] = {
677 gen_op_btw_T0_T1_cc,
678 gen_op_btsw_T0_T1_cc,
679 gen_op_btrw_T0_T1_cc,
680 gen_op_btcw_T0_T1_cc,
681 },
682 [1] = {
683 gen_op_btl_T0_T1_cc,
684 gen_op_btsl_T0_T1_cc,
685 gen_op_btrl_T0_T1_cc,
686 gen_op_btcl_T0_T1_cc,
687 },
688#ifdef TARGET_X86_64
689 [2] = {
690 gen_op_btq_T0_T1_cc,
691 gen_op_btsq_T0_T1_cc,
692 gen_op_btrq_T0_T1_cc,
693 gen_op_btcq_T0_T1_cc,
694 },
695#endif
696};
697
698static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
699 gen_op_add_bitw_A0_T1,
700 gen_op_add_bitl_A0_T1,
701 X86_64_ONLY(gen_op_add_bitq_A0_T1),
702};
703
704static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
705 [0] = {
706 gen_op_bsfw_T0_cc,
707 gen_op_bsrw_T0_cc,
708 },
709 [1] = {
710 gen_op_bsfl_T0_cc,
711 gen_op_bsrl_T0_cc,
712 },
713#ifdef TARGET_X86_64
714 [2] = {
715 gen_op_bsfq_T0_cc,
716 gen_op_bsrq_T0_cc,
717 },
718#endif
719};
720
721static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
722 gen_op_ldsb_raw_T0_A0,
723 gen_op_ldsw_raw_T0_A0,
724 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
725 NULL,
726#ifndef CONFIG_USER_ONLY
727 gen_op_ldsb_kernel_T0_A0,
728 gen_op_ldsw_kernel_T0_A0,
729 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
730 NULL,
731
732 gen_op_ldsb_user_T0_A0,
733 gen_op_ldsw_user_T0_A0,
734 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
735 NULL,
736#endif
737};
738
739static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
740 gen_op_ldub_raw_T0_A0,
741 gen_op_lduw_raw_T0_A0,
742 NULL,
743 NULL,
744
745#ifndef CONFIG_USER_ONLY
746 gen_op_ldub_kernel_T0_A0,
747 gen_op_lduw_kernel_T0_A0,
748 NULL,
749 NULL,
750
751 gen_op_ldub_user_T0_A0,
752 gen_op_lduw_user_T0_A0,
753 NULL,
754 NULL,
755#endif
756};
757
758/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
759static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
760 gen_op_ldub_raw_T0_A0,
761 gen_op_lduw_raw_T0_A0,
762 gen_op_ldl_raw_T0_A0,
763 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
764
765#ifndef CONFIG_USER_ONLY
766 gen_op_ldub_kernel_T0_A0,
767 gen_op_lduw_kernel_T0_A0,
768 gen_op_ldl_kernel_T0_A0,
769 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
770
771 gen_op_ldub_user_T0_A0,
772 gen_op_lduw_user_T0_A0,
773 gen_op_ldl_user_T0_A0,
774 X86_64_ONLY(gen_op_ldq_user_T0_A0),
775#endif
776};
777
778static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
779 gen_op_ldub_raw_T1_A0,
780 gen_op_lduw_raw_T1_A0,
781 gen_op_ldl_raw_T1_A0,
782 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
783
784#ifndef CONFIG_USER_ONLY
785 gen_op_ldub_kernel_T1_A0,
786 gen_op_lduw_kernel_T1_A0,
787 gen_op_ldl_kernel_T1_A0,
788 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
789
790 gen_op_ldub_user_T1_A0,
791 gen_op_lduw_user_T1_A0,
792 gen_op_ldl_user_T1_A0,
793 X86_64_ONLY(gen_op_ldq_user_T1_A0),
794#endif
795};
796
797static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
798 gen_op_stb_raw_T0_A0,
799 gen_op_stw_raw_T0_A0,
800 gen_op_stl_raw_T0_A0,
801 X86_64_ONLY(gen_op_stq_raw_T0_A0),
802
803#ifndef CONFIG_USER_ONLY
804 gen_op_stb_kernel_T0_A0,
805 gen_op_stw_kernel_T0_A0,
806 gen_op_stl_kernel_T0_A0,
807 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
808
809 gen_op_stb_user_T0_A0,
810 gen_op_stw_user_T0_A0,
811 gen_op_stl_user_T0_A0,
812 X86_64_ONLY(gen_op_stq_user_T0_A0),
813#endif
814};
815
816static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
817 NULL,
818 gen_op_stw_raw_T1_A0,
819 gen_op_stl_raw_T1_A0,
820 X86_64_ONLY(gen_op_stq_raw_T1_A0),
821
822#ifndef CONFIG_USER_ONLY
823 NULL,
824 gen_op_stw_kernel_T1_A0,
825 gen_op_stl_kernel_T1_A0,
826 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
827
828 NULL,
829 gen_op_stw_user_T1_A0,
830 gen_op_stl_user_T1_A0,
831 X86_64_ONLY(gen_op_stq_user_T1_A0),
832#endif
833};
834
835#ifdef VBOX
/* Emit a micro-op that polls for pending external (VBox) events.
   Fix: the definition used an empty parameter list `()`, which in C
   declares unspecified parameters; use an explicit (void) prototype. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
840
841static inline void gen_update_eip(target_ulong pc)
842{
843#ifdef TARGET_X86_64
844 if (pc == (uint32_t)pc) {
845 gen_op_movl_eip_im(pc);
846 } else if (pc == (int32_t)pc) {
847 gen_op_movq_eip_im(pc);
848 } else {
849 gen_op_movq_eip_im64(pc >> 32, pc);
850 }
851#else
852 gen_op_movl_eip_im(pc);
853#endif
854}
855
856#endif /* VBOX */
857
858static inline void gen_jmp_im(target_ulong pc)
859{
860#ifdef VBOX
861 gen_check_external_event();
862#endif /* VBOX */
863#ifdef TARGET_X86_64
864 if (pc == (uint32_t)pc) {
865 gen_op_movl_eip_im(pc);
866 } else if (pc == (int32_t)pc) {
867 gen_op_movq_eip_im(pc);
868 } else {
869 gen_op_movq_eip_im64(pc >> 32, pc);
870 }
871#else
872 gen_op_movl_eip_im(pc);
873#endif
874}
875
876static inline void gen_string_movl_A0_ESI(DisasContext *s)
877{
878 int override;
879
880 override = s->override;
881#ifdef TARGET_X86_64
882 if (s->aflag == 2) {
883 if (override >= 0) {
884 gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
885 gen_op_addq_A0_reg_sN[0][R_ESI]();
886 } else {
887 gen_op_movq_A0_reg[R_ESI]();
888 }
889 } else
890#endif
891 if (s->aflag) {
892 /* 32 bit address */
893 if (s->addseg && override < 0)
894 override = R_DS;
895 if (override >= 0) {
896 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
897 gen_op_addl_A0_reg_sN[0][R_ESI]();
898 } else {
899 gen_op_movl_A0_reg[R_ESI]();
900 }
901 } else {
902 /* 16 address, always override */
903 if (override < 0)
904 override = R_DS;
905 gen_op_movl_A0_reg[R_ESI]();
906 gen_op_andl_A0_ffff();
907 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
908 }
909}
910
911static inline void gen_string_movl_A0_EDI(DisasContext *s)
912{
913#ifdef TARGET_X86_64
914 if (s->aflag == 2) {
915 gen_op_movq_A0_reg[R_EDI]();
916 } else
917#endif
918 if (s->aflag) {
919 if (s->addseg) {
920 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
921 gen_op_addl_A0_reg_sN[0][R_EDI]();
922 } else {
923 gen_op_movl_A0_reg[R_EDI]();
924 }
925 } else {
926 gen_op_movl_A0_reg[R_EDI]();
927 gen_op_andl_A0_ffff();
928 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
929 }
930}
931
932static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
933 gen_op_movl_T0_Dshiftb,
934 gen_op_movl_T0_Dshiftw,
935 gen_op_movl_T0_Dshiftl,
936 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
937};
938
939static GenOpFunc1 *gen_op_jnz_ecx[3] = {
940 gen_op_jnz_ecxw,
941 gen_op_jnz_ecxl,
942 X86_64_ONLY(gen_op_jnz_ecxq),
943};
944
945static GenOpFunc1 *gen_op_jz_ecx[3] = {
946 gen_op_jz_ecxw,
947 gen_op_jz_ecxl,
948 X86_64_ONLY(gen_op_jz_ecxq),
949};
950
951static GenOpFunc *gen_op_dec_ECX[3] = {
952 gen_op_decw_ECX,
953 gen_op_decl_ECX,
954 X86_64_ONLY(gen_op_decq_ECX),
955};
956
957static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
958 {
959 gen_op_jnz_subb,
960 gen_op_jnz_subw,
961 gen_op_jnz_subl,
962 X86_64_ONLY(gen_op_jnz_subq),
963 },
964 {
965 gen_op_jz_subb,
966 gen_op_jz_subw,
967 gen_op_jz_subl,
968 X86_64_ONLY(gen_op_jz_subq),
969 },
970};
971
972static GenOpFunc *gen_op_in_DX_T0[3] = {
973 gen_op_inb_DX_T0,
974 gen_op_inw_DX_T0,
975 gen_op_inl_DX_T0,
976};
977
978static GenOpFunc *gen_op_out_DX_T0[3] = {
979 gen_op_outb_DX_T0,
980 gen_op_outw_DX_T0,
981 gen_op_outl_DX_T0,
982};
983
984static GenOpFunc *gen_op_in[3] = {
985 gen_op_inb_T0_T1,
986 gen_op_inw_T0_T1,
987 gen_op_inl_T0_T1,
988};
989
990static GenOpFunc *gen_op_out[3] = {
991 gen_op_outb_T0_T1,
992 gen_op_outw_T0_T1,
993 gen_op_outl_T0_T1,
994};
995
996static GenOpFunc *gen_check_io_T0[3] = {
997 gen_op_check_iob_T0,
998 gen_op_check_iow_T0,
999 gen_op_check_iol_T0,
1000};
1001
1002static GenOpFunc *gen_check_io_DX[3] = {
1003 gen_op_check_iob_DX,
1004 gen_op_check_iow_DX,
1005 gen_op_check_iol_DX,
1006};
1007
1008static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1009{
1010 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1011 if (s->cc_op != CC_OP_DYNAMIC)
1012 gen_op_set_cc_op(s->cc_op);
1013 gen_jmp_im(cur_eip);
1014 if (use_dx)
1015 gen_check_io_DX[ot]();
1016 else
1017 gen_check_io_T0[ot]();
1018 }
1019}
1020
1021static inline void gen_movs(DisasContext *s, int ot)
1022{
1023 gen_string_movl_A0_ESI(s);
1024 gen_op_ld_T0_A0[ot + s->mem_index]();
1025 gen_string_movl_A0_EDI(s);
1026 gen_op_st_T0_A0[ot + s->mem_index]();
1027 gen_op_movl_T0_Dshift[ot]();
1028#ifdef TARGET_X86_64
1029 if (s->aflag == 2) {
1030 gen_op_addq_ESI_T0();
1031 gen_op_addq_EDI_T0();
1032 } else
1033#endif
1034 if (s->aflag) {
1035 gen_op_addl_ESI_T0();
1036 gen_op_addl_EDI_T0();
1037 } else {
1038 gen_op_addw_ESI_T0();
1039 gen_op_addw_EDI_T0();
1040 }
1041}
1042
1043static inline void gen_update_cc_op(DisasContext *s)
1044{
1045 if (s->cc_op != CC_OP_DYNAMIC) {
1046 gen_op_set_cc_op(s->cc_op);
1047 s->cc_op = CC_OP_DYNAMIC;
1048 }
1049}
1050
1051/* XXX: does not work with gdbstub "ice" single step - not a
1052 serious problem */
1053static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1054{
1055 int l1, l2;
1056
1057 l1 = gen_new_label();
1058 l2 = gen_new_label();
1059 gen_op_jnz_ecx[s->aflag](l1);
1060 gen_set_label(l2);
1061 gen_jmp_tb(s, next_eip, 1);
1062 gen_set_label(l1);
1063 return l2;
1064}
1065
1066static inline void gen_stos(DisasContext *s, int ot)
1067{
1068 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1069 gen_string_movl_A0_EDI(s);
1070 gen_op_st_T0_A0[ot + s->mem_index]();
1071 gen_op_movl_T0_Dshift[ot]();
1072#ifdef TARGET_X86_64
1073 if (s->aflag == 2) {
1074 gen_op_addq_EDI_T0();
1075 } else
1076#endif
1077 if (s->aflag) {
1078 gen_op_addl_EDI_T0();
1079 } else {
1080 gen_op_addw_EDI_T0();
1081 }
1082}
1083
1084static inline void gen_lods(DisasContext *s, int ot)
1085{
1086 gen_string_movl_A0_ESI(s);
1087 gen_op_ld_T0_A0[ot + s->mem_index]();
1088 gen_op_mov_reg_T0[ot][R_EAX]();
1089 gen_op_movl_T0_Dshift[ot]();
1090#ifdef TARGET_X86_64
1091 if (s->aflag == 2) {
1092 gen_op_addq_ESI_T0();
1093 } else
1094#endif
1095 if (s->aflag) {
1096 gen_op_addl_ESI_T0();
1097 } else {
1098 gen_op_addw_ESI_T0();
1099 }
1100}
1101
1102static inline void gen_scas(DisasContext *s, int ot)
1103{
1104 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1105 gen_string_movl_A0_EDI(s);
1106 gen_op_ld_T1_A0[ot + s->mem_index]();
1107 gen_op_cmpl_T0_T1_cc();
1108 gen_op_movl_T0_Dshift[ot]();
1109#ifdef TARGET_X86_64
1110 if (s->aflag == 2) {
1111 gen_op_addq_EDI_T0();
1112 } else
1113#endif
1114 if (s->aflag) {
1115 gen_op_addl_EDI_T0();
1116 } else {
1117 gen_op_addw_EDI_T0();
1118 }
1119}
1120
1121static inline void gen_cmps(DisasContext *s, int ot)
1122{
1123 gen_string_movl_A0_ESI(s);
1124 gen_op_ld_T0_A0[ot + s->mem_index]();
1125 gen_string_movl_A0_EDI(s);
1126 gen_op_ld_T1_A0[ot + s->mem_index]();
1127 gen_op_cmpl_T0_T1_cc();
1128 gen_op_movl_T0_Dshift[ot]();
1129#ifdef TARGET_X86_64
1130 if (s->aflag == 2) {
1131 gen_op_addq_ESI_T0();
1132 gen_op_addq_EDI_T0();
1133 } else
1134#endif
1135 if (s->aflag) {
1136 gen_op_addl_ESI_T0();
1137 gen_op_addl_EDI_T0();
1138 } else {
1139 gen_op_addw_ESI_T0();
1140 gen_op_addw_EDI_T0();
1141 }
1142}
1143
1144static inline void gen_ins(DisasContext *s, int ot)
1145{
1146 gen_string_movl_A0_EDI(s);
1147 gen_op_movl_T0_0();
1148 gen_op_st_T0_A0[ot + s->mem_index]();
1149 gen_op_in_DX_T0[ot]();
1150 gen_op_st_T0_A0[ot + s->mem_index]();
1151 gen_op_movl_T0_Dshift[ot]();
1152#ifdef TARGET_X86_64
1153 if (s->aflag == 2) {
1154 gen_op_addq_EDI_T0();
1155 } else
1156#endif
1157 if (s->aflag) {
1158 gen_op_addl_EDI_T0();
1159 } else {
1160 gen_op_addw_EDI_T0();
1161 }
1162}
1163
1164static inline void gen_outs(DisasContext *s, int ot)
1165{
1166 gen_string_movl_A0_ESI(s);
1167 gen_op_ld_T0_A0[ot + s->mem_index]();
1168 gen_op_out_DX_T0[ot]();
1169 gen_op_movl_T0_Dshift[ot]();
1170#ifdef TARGET_X86_64
1171 if (s->aflag == 2) {
1172 gen_op_addq_ESI_T0();
1173 } else
1174#endif
1175 if (s->aflag) {
1176 gen_op_addl_ESI_T0();
1177 } else {
1178 gen_op_addw_ESI_T0();
1179 }
1180}
1181
1182/* same method as Valgrind : we generate jumps to current or next
1183 instruction */
1184#define GEN_REPZ(op) \
1185static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1186 target_ulong cur_eip, target_ulong next_eip) \
1187{ \
1188 int l2;\
1189 gen_update_cc_op(s); \
1190 l2 = gen_jz_ecx_string(s, next_eip); \
1191 gen_ ## op(s, ot); \
1192 gen_op_dec_ECX[s->aflag](); \
1193 /* a loop would cause two single step exceptions if ECX = 1 \
1194 before rep string_insn */ \
1195 if (!s->jmp_opt) \
1196 gen_op_jz_ecx[s->aflag](l2); \
1197 gen_jmp(s, cur_eip); \
1198}
1199
1200#define GEN_REPZ2(op) \
1201static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1202 target_ulong cur_eip, \
1203 target_ulong next_eip, \
1204 int nz) \
1205{ \
1206 int l2;\
1207 gen_update_cc_op(s); \
1208 l2 = gen_jz_ecx_string(s, next_eip); \
1209 gen_ ## op(s, ot); \
1210 gen_op_dec_ECX[s->aflag](); \
1211 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1212 gen_op_string_jnz_sub[nz][ot](l2);\
1213 if (!s->jmp_opt) \
1214 gen_op_jz_ecx[s->aflag](l2); \
1215 gen_jmp(s, cur_eip); \
1216}
1217
1218GEN_REPZ(movs)
1219GEN_REPZ(stos)
1220GEN_REPZ(lods)
1221GEN_REPZ(ins)
1222GEN_REPZ(outs)
1223GEN_REPZ2(scas)
1224GEN_REPZ2(cmps)
1225
1226enum {
1227 JCC_O,
1228 JCC_B,
1229 JCC_Z,
1230 JCC_BE,
1231 JCC_S,
1232 JCC_P,
1233 JCC_L,
1234 JCC_LE,
1235};
1236
1237static GenOpFunc1 *gen_jcc_sub[4][8] = {
1238 [OT_BYTE] = {
1239 NULL,
1240 gen_op_jb_subb,
1241 gen_op_jz_subb,
1242 gen_op_jbe_subb,
1243 gen_op_js_subb,
1244 NULL,
1245 gen_op_jl_subb,
1246 gen_op_jle_subb,
1247 },
1248 [OT_WORD] = {
1249 NULL,
1250 gen_op_jb_subw,
1251 gen_op_jz_subw,
1252 gen_op_jbe_subw,
1253 gen_op_js_subw,
1254 NULL,
1255 gen_op_jl_subw,
1256 gen_op_jle_subw,
1257 },
1258 [OT_LONG] = {
1259 NULL,
1260 gen_op_jb_subl,
1261 gen_op_jz_subl,
1262 gen_op_jbe_subl,
1263 gen_op_js_subl,
1264 NULL,
1265 gen_op_jl_subl,
1266 gen_op_jle_subl,
1267 },
1268#ifdef TARGET_X86_64
1269 [OT_QUAD] = {
1270 NULL,
1271 BUGGY_64(gen_op_jb_subq),
1272 gen_op_jz_subq,
1273 BUGGY_64(gen_op_jbe_subq),
1274 gen_op_js_subq,
1275 NULL,
1276 BUGGY_64(gen_op_jl_subq),
1277 BUGGY_64(gen_op_jle_subq),
1278 },
1279#endif
1280};
/* micro-op dispatch tables. NULL entries mean the combination is
   invalid or is handled through a slower generic path. */

/* LOOPNZ / LOOPZ / JNZ-on-ECX helpers, indexed by
   [address size: 0=16 bit, 1=32 bit, 2=64 bit][op]. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};

/* SETcc helpers that evaluate the condition from the full lazy flag
   state (after gen_op_set_cc_op), indexed by jcc_op. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};

/* fast SETcc helpers usable when cc_op is a subtract/compare
   (CC_OP_SUBx), indexed [operand size][jcc_op]. NULL entries (O and P
   conditions) fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};

/* x87 arithmetic on ST0 with FT0 as second operand, indexed by the
   3-bit FPU opcode field (FCOM appears twice for FCOM/FCOMP slots). */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering: for the ST(i) destination
   forms, sub/subr and div/divr are swapped relative to the table
   above. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1379
/* emit micro-ops for arithmetic operation 'op' (OP_ADDL, OP_ADCL, ...)
   with operand size 'ot'. The second source operand is expected in T1.
   If d == OR_TMP0, the destination is the memory operand whose address
   is in A0; otherwise it is register 'd'. s1->cc_op is updated to
   record how EFLAGS must be reconstructed lazily afterwards. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the first operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB read the current carry, so the previous lazy flag
           state must be materialized first */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        /* the helper computed the flags itself */
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;   /* compare: no result to write back */
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1439
/* emit an INC (c > 0) or DEC (c <= 0) with operand size 'ot'. If
   d == OR_TMP0 the operand is in memory at A0, otherwise register 'd'.
   INC/DEC preserve CF, hence the previous flag state is flushed before
   the operation so the final flags can merge it back in. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    /* write back before the flags update (precise exceptions) */
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    gen_op_update_inc_cc();
}
1462
/* emit a shift/rotate 'op' of operand size 'ot'. Destination is
   register 'd' or memory at A0 (d == OR_TMP0); the count comes from
   register 's' unless s == OR_TMP1, in which case T1 already holds it. */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1483
/* emit a shift/rotate by the immediate count 'c': loads the count into
   T1 and reuses the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1490
/* decode the modrm byte 'modrm' (plus any SIB byte and displacement)
   and emit micro-ops that leave the effective address, including the
   segment base when needed, in A0. Advances s->pc past the bytes
   consumed. *reg_ptr is always set to OR_A0 and *offset_ptr to 0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;   /* explicit segment prefix forces the add */
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit address size */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: 32 bit displacement only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* RIP-relative addressing in 64 bit mode */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16 bit address size */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* pure 16 bit displacement, no base registers */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the 8 fixed 16 bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();   /* 16 bit wrap-around */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1680
1681static void gen_nop_modrm(DisasContext *s, int modrm)
1682{
1683 int mod, rm, base, code;
1684
1685 mod = (modrm >> 6) & 3;
1686 if (mod == 3)
1687 return;
1688 rm = modrm & 7;
1689
1690 if (s->aflag) {
1691
1692 base = rm;
1693
1694 if (base == 4) {
1695 code = ldub_code(s->pc++);
1696 base = (code & 7);
1697 }
1698
1699 switch (mod) {
1700 case 0:
1701 if (base == 5) {
1702 s->pc += 4;
1703 }
1704 break;
1705 case 1:
1706 s->pc++;
1707 break;
1708 default:
1709 case 2:
1710 s->pc += 4;
1711 break;
1712 }
1713 } else {
1714 switch (mod) {
1715 case 0:
1716 if (rm == 6) {
1717 s->pc += 2;
1718 }
1719 break;
1720 case 1:
1721 s->pc++;
1722 break;
1723 default:
1724 case 2:
1725 s->pc += 2;
1726 break;
1727 }
1728 }
1729}
1730
1731/* used for LEA and MOV AX, mem */
1732static void gen_add_A0_ds_seg(DisasContext *s)
1733{
1734 int override, must_add_seg;
1735 must_add_seg = s->addseg;
1736 override = R_DS;
1737 if (s->override >= 0) {
1738 override = s->override;
1739 must_add_seg = 1;
1740 } else {
1741 override = R_DS;
1742 }
1743 if (must_add_seg) {
1744#ifdef TARGET_X86_64
1745 if (CODE64(s)) {
1746 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1747 } else
1748#endif
1749 {
1750 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1751 }
1752 }
1753}
1754
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0. For mod == 3 the operand is a register and a plain move is
   emitted; otherwise the effective address is computed and a memory
   access of size 'ot' is generated. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register <-> register */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        /* memory operand: compute the address into A0 first */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1786
/* fetch an immediate of operand type 'ot' from the instruction stream
   and advance s->pc past it. OT_QUAD falls into the OT_LONG default
   (64 bit immediates are at most 32 bits, sign-extended elsewhere). */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
1808
1809static inline int insn_const_size(unsigned int ot)
1810{
1811 if (ot <= OT_LONG)
1812 return 1 << ot;
1813 else
1814 return 4;
1815}
1816
/* emit a jump to 'eip' ending translation slot 'tb_num' (0 or 1). If
   the target lies in the same page(s) as the current TB, a directly
   patchable goto_tb op is used so the TBs can be chained; otherwise
   a plain end-of-block is generated. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) to the execution loop for chaining */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1841
/* generate a conditional jump for condition 'b': branch to 'val' when
   the condition holds, to 'next_eip' otherwise (swapped when the low
   bit of 'b' inverts the condition). With s->jmp_opt, jcc micro-ops
   specialized on the current cc_op are used and both targets become
   chainable TBs; otherwise the condition is computed the slow way and
   the block ends through gen_eob(). */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only ZF and SF can be read cheaply from
               the stored result; the size index is (cc_op - base) % 4 */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast op available: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: evaluate the condition, then branch between the
           two eip values and end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1973
/* compute condition 'b' into T0 (0 or 1), using a fast helper
   specialized on the current cc_op when possible, and the slow
   full-flag computation otherwise. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF are directly recoverable from the stored
           result for these ops */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize the full flag state and use the generic helper */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2040
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS. In protected mode the full
   checking helper is used (which can fault, hence the eip update);
   in real/vm86 mode the selector is loaded directly. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2063
/* add 'addend' to the stack pointer using the width implied by the
   current mode (RSP in 64 bit code, ESP if ss32, SP otherwise); the
   common small addends have dedicated micro-ops. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}
2090
/* generate a push of T0. It depends on ss32, addseg and dflag. The
   stack pointer is only updated after the store succeeds (precise
   exception support), using T1 as the saved new ESP when a segment
   base has to be added to A0. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            /* 64 bit operand size */
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            /* 16 bit operand size (0x66 prefix) */
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2130
2131/* generate a push. It depends on ss32, addseg and dflag */
2132/* slower version for T1, only used for call Ev */
2133static void gen_push_T1(DisasContext *s)
2134{
2135#ifdef TARGET_X86_64
2136 if (CODE64(s)) {
2137 gen_op_movq_A0_reg[R_ESP]();
2138 if (s->dflag) {
2139 gen_op_subq_A0_8();
2140 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2141 } else {
2142 gen_op_subq_A0_2();
2143 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2144 }
2145 gen_op_movq_ESP_A0();
2146 } else
2147#endif
2148 {
2149 gen_op_movl_A0_reg[R_ESP]();
2150 if (!s->dflag)
2151 gen_op_subl_A0_2();
2152 else
2153 gen_op_subl_A0_4();
2154 if (s->ss32) {
2155 if (s->addseg) {
2156 gen_op_addl_A0_SS();
2157 }
2158 } else {
2159 gen_op_andl_A0_ffff();
2160 gen_op_addl_A0_SS();
2161 }
2162 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2163
2164 if (s->ss32 && !s->addseg)
2165 gen_op_movl_ESP_A0();
2166 else
2167 gen_stack_update(s, (-2) << s->dflag);
2168 }
2169}
2170
/* two step pop is necessary for precise exceptions: this step only
   loads the top of stack into T0; gen_pop_update() adjusts ESP
   afterwards, once the value has been consumed safely. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2192
2193static void gen_pop_update(DisasContext *s)
2194{
2195#ifdef TARGET_X86_64
2196 if (CODE64(s) && s->dflag) {
2197 gen_stack_update(s, 8);
2198 } else
2199#endif
2200 {
2201 gen_stack_update(s, 2 << s->dflag);
2202 }
2203}
2204
/* load the current stack address into A0 (segmented when addseg is
   set), keeping the raw offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2214
/* PUSHA: store the 8 general registers (EDI first in memory) below the
   current stack pointer, then commit the new ESP from T1.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);   /* room for 8 entries */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();                  /* T1 = final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2233
/* POPA: reload the 8 general registers from the stack (skipping the
   saved ESP) and commit the new stack pointer from T1.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);    /* T1 = final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2255
/* ENTER: push EBP, optionally copy 'level' (0-31) nesting frame
   pointers via a helper, set EBP to the new frame and reserve
   'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;   /* the architectural level field is 5 bits */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();   /* T1 = new frame pointer */

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();   /* T1 = new frame pointer */
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2303
/* raise exception 'trapno' at guest eip 'cur_eip': flush the lazy
   flag state, set eip and emit the raise op; ends the TB. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2312
/* an interrupt is different from an exception because of the
   privilege checks; the next-instruction length is passed so the
   helper can compute the return address. Ends the TB. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2324
/* emit a debug trap at guest eip 'cur_eip'; ends the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2333
/* generate a generic end of block. Trace exception is also generated
   if needed (single-stepping or the TF flag); otherwise return 0 to
   the execution loop so no TB chaining happens. */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the interrupt shadow only lasts one instruction */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2353
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur. Uses TB chaining when
   allowed, a plain end-of-block otherwise. */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2373
/* convenience wrapper: jump to 'eip' using translation slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2378
/* load the target_ulong immediate 'val' into T0, using a 64 bit
   micro-op only when the value does not fit a sign-extended 32 bit
   immediate. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2391
/* same as gen_movtl_T0_im but for the T1 temporary. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2404
/* add the immediate 'val' to A0 with the address width of the current
   code size (64 bit in long mode, 32 bit otherwise). */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2414
/* 64 bit (q) and 128 bit (o) transfers between the guest address in A0
   and an offset inside 'env'. Indexed by s->mem_index >> 2
   (raw / kernel / user access variant). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2446
/* marker for opcodes that need ad-hoc handling in gen_sse() instead of
   a simple two-operand helper */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* table entry pairs/quads are indexed by prefix: [none(MMX), 0x66,
   0xF3, 0xF2] */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }

/* main SSE/MMX dispatch table, indexed by [second opcode byte][prefix].
   NULL means invalid, SSE_SPECIAL means decoded in gen_sse(). */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2575
/* MMX/SSE shift-by-immediate group (0x71/0x72/0x73), indexed by
   [8 * size-group + modrm reg field][mmx/xmm]. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },   /* xmm only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },   /* xmm only */
};

/* int <-> scalar float conversions, in groups of four
   (ss, sd, and their 64 bit-integer variants). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};

/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the imm8 selector. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2616
/* Translate one MMX/SSE/SSE2/SSE3 instruction.  'b' is the opcode
   byte following the 0x0f escape (any extra high bits from the
   caller are masked off below), 'pc_start' is the guest address of
   the instruction and 'rex_r' the REX.R contribution (0 or 8) to the
   modrm 'reg' field.  Generates the #NM/#UD exception checks before
   emitting any operation. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* b1 selects the sse_op_table1 column by mandatory prefix:
       0 = none (MMX), 1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: device-not-available fault */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE requires CR4.OSFXSR; MMX does not */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* irregular encodings, handled case by case; the case value
           is (prefix column << 8) | opcode */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
            /* non-temporal 128-bit stores (caching hint ignored) */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                /* REX.W form: movq mm, r/m64 */
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                /* REX.W form: movq xmm, r/m64 */
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                /* MMX registers are not REX-extended */
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            /* full 128-bit loads/moves (alignment fault not emulated) */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                /* register form only writes the low dword */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* duplicate the low qword into the high qword */
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* upper qword of the destination is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            /* full 128-bit stores/moves, register-to-memory direction */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            /* store forms are memory-only */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* stage the imm8 shift count into xmm_t0/mmx_t0 and
               dispatch through sse_op_table2 */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            /* source is an MMX register/mem64, so MMX state is needed */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            /* destination is an MMX register */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                /* (b >> 8) & 1 distinguishes sd (64-bit load) from ss */
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating group, (b & 1) * 4 the rounding one */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            /* imm8 follows the modrm operand */
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            /* destination is a general register, re-derive reg with REX.R */
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            /* destination is a general register, re-derive reg with REX.R */
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these carry an imm8 after the modrm operand */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            /* table entry actually takes three args (imm8 order) */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        /* comiss/ucomiss (0x2e/0x2f) write EFLAGS */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3214
3215#ifdef VBOX
/* Checks if it's an invalid lock sequence. Only a few instructions
   can be used together with the lock prefix and of those only the
   forms that write a memory operand. So, this is kind of annoying
   work to do...
3220 The AMD manual lists the following instructions.
3221 ADC
3222 ADD
3223 AND
3224 BTC
3225 BTR
3226 BTS
3227 CMPXCHG
3228 CMPXCHG8B
3229 CMPXCHG16B
3230 DEC
3231 INC
3232 NEG
3233 NOT
3234 OR
3235 SBB
3236 SUB
3237 XADD
3238 XCHG
3239 XOR */
3240static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
3241{
3242 target_ulong pc = s->pc;
3243 int modrm, mod, op;
3244
3245 /* X={8,16,32,64} Y={16,32,64} */
3246 switch (b)
3247 {
3248 /* /2: ADC reg/memX, immX */
3249 /* /0: ADD reg/memX, immX */
3250 /* /4: AND reg/memX, immX */
3251 /* /1: OR reg/memX, immX */
3252 /* /3: SBB reg/memX, immX */
3253 /* /5: SUB reg/memX, immX */
3254 /* /6: XOR reg/memX, immX */
3255 case 0x80:
3256 case 0x81:
3257 case 0x83:
3258 modrm = ldub_code(pc++);
3259 op = (modrm >> 3) & 7;
3260 if (op == 7) /* /7: CMP */
3261 break;
3262 mod = (modrm >> 6) & 3;
3263 if (mod == 3) /* register destination */
3264 break;
3265 return false;
3266
3267 case 0x10: /* /r: ADC reg/mem8, reg8 */
3268 case 0x11: /* /r: ADC reg/memX, regY */
3269 case 0x00: /* /r: ADD reg/mem8, reg8 */
3270 case 0x01: /* /r: ADD reg/memX, regY */
3271 case 0x20: /* /r: AND reg/mem8, reg8 */
3272 case 0x21: /* /r: AND reg/memY, regY */
3273 case 0x08: /* /r: OR reg/mem8, reg8 */
3274 case 0x09: /* /r: OR reg/memY, regY */
3275 case 0x18: /* /r: SBB reg/mem8, reg8 */
3276 case 0x19: /* /r: SBB reg/memY, regY */
3277 case 0x28: /* /r: SUB reg/mem8, reg8 */
3278 case 0x29: /* /r: SUB reg/memY, regY */
3279 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
3280 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
3281 case 0x30: /* /r: XOR reg/mem8, reg8 */
3282 case 0x31: /* /r: XOR reg/memY, regY */
3283 modrm = ldub_code(pc++);
3284 mod = (modrm >> 6) & 3;
3285 if (mod == 3) /* register destination */
3286 break;
3287 return false;
3288
3289 /* /1: DEC reg/memX */
3290 /* /0: INC reg/memX */
3291 case 0xfe:
3292 case 0xff:
3293 modrm = ldub_code(pc++);
3294 mod = (modrm >> 6) & 3;
3295 if (mod == 3) /* register destination */
3296 break;
3297 return false;
3298
3299 /* /3: NEG reg/memX */
3300 /* /2: NOT reg/memX */
3301 case 0xf6:
3302 case 0xf7:
3303 modrm = ldub_code(pc++);
3304 mod = (modrm >> 6) & 3;
3305 if (mod == 3) /* register destination */
3306 break;
3307 return false;
3308
3309 case 0x0f:
3310 b = ldub_code(pc++);
3311 switch (b)
3312 {
3313 /* /7: BTC reg/memY, imm8 */
3314 /* /6: BTR reg/memY, imm8 */
3315 /* /5: BTS reg/memY, imm8 */
3316 case 0xba:
3317 modrm = ldub_code(pc++);
3318 op = (modrm >> 3) & 7;
3319 if (op < 5)
3320 break;
3321 mod = (modrm >> 6) & 3;
3322 if (mod == 3) /* register destination */
3323 break;
3324 return false;
3325
3326 case 0xbb: /* /r: BTC reg/memY, regY */
3327 case 0xb3: /* /r: BTR reg/memY, regY */
3328 case 0xab: /* /r: BTS reg/memY, regY */
3329 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
3330 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
3331 case 0xc0: /* /r: XADD reg/mem8, reg8 */
3332 case 0xc1: /* /r: XADD reg/memY, regY */
3333 modrm = ldub_code(pc++);
3334 mod = (modrm >> 6) & 3;
3335 if (mod == 3) /* register destination */
3336 break;
3337 return false;
3338
3339 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
3340 case 0xc7:
3341 modrm = ldub_code(pc++);
3342 op = (modrm >> 3) & 7;
3343 if (op != 1)
3344 break;
3345 return false;
3346 }
3347 break;
3348 }
3349
3350 /* illegal sequence. The s->pc is past the lock prefix and that
3351 is sufficient for the TB, I think. */
3352 Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
3353 return true;
3354}
3355#endif /* VBOX */
3356
3357
3358/* convert one instruction. s->is_jmp is set if the translation must
3359 be stopped. Return the next pc value */
3360static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3361{
3362 int b, prefixes, aflag, dflag;
3363 int shift, ot;
3364 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3365 target_ulong next_eip, tval;
3366 int rex_w, rex_r;
3367
3368 s->pc = pc_start;
3369 prefixes = 0;
3370 aflag = s->code32;
3371 dflag = s->code32;
3372 s->override = -1;
3373 rex_w = -1;
3374 rex_r = 0;
3375#ifdef TARGET_X86_64
3376 s->rex_x = 0;
3377 s->rex_b = 0;
3378 x86_64_hregs = 0;
3379#endif
3380 s->rip_offset = 0; /* for relative ip address */
3381
3382#ifdef VBOX
3383 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
3384 gen_update_eip(pc_start - s->cs_base);
3385#endif
3386
3387 next_byte:
3388 b = ldub_code(s->pc);
3389 s->pc++;
3390 /* check prefixes */
3391#ifdef TARGET_X86_64
3392 if (CODE64(s)) {
3393 switch (b) {
3394 case 0xf3:
3395 prefixes |= PREFIX_REPZ;
3396 goto next_byte;
3397 case 0xf2:
3398 prefixes |= PREFIX_REPNZ;
3399 goto next_byte;
3400 case 0xf0:
3401 prefixes |= PREFIX_LOCK;
3402 goto next_byte;
3403 case 0x2e:
3404 s->override = R_CS;
3405 goto next_byte;
3406 case 0x36:
3407 s->override = R_SS;
3408 goto next_byte;
3409 case 0x3e:
3410 s->override = R_DS;
3411 goto next_byte;
3412 case 0x26:
3413 s->override = R_ES;
3414 goto next_byte;
3415 case 0x64:
3416 s->override = R_FS;
3417 goto next_byte;
3418 case 0x65:
3419 s->override = R_GS;
3420 goto next_byte;
3421 case 0x66:
3422 prefixes |= PREFIX_DATA;
3423 goto next_byte;
3424 case 0x67:
3425 prefixes |= PREFIX_ADR;
3426 goto next_byte;
3427 case 0x40 ... 0x4f:
3428 /* REX prefix */
3429 rex_w = (b >> 3) & 1;
3430 rex_r = (b & 0x4) << 1;
3431 s->rex_x = (b & 0x2) << 2;
3432 REX_B(s) = (b & 0x1) << 3;
3433 x86_64_hregs = 1; /* select uniform byte register addressing */
3434 goto next_byte;
3435 }
3436 if (rex_w == 1) {
3437 /* 0x66 is ignored if rex.w is set */
3438 dflag = 2;
3439 } else {
3440 if (prefixes & PREFIX_DATA)
3441 dflag ^= 1;
3442 }
3443 if (!(prefixes & PREFIX_ADR))
3444 aflag = 2;
3445 } else
3446#endif
3447 {
3448 switch (b) {
3449 case 0xf3:
3450 prefixes |= PREFIX_REPZ;
3451 goto next_byte;
3452 case 0xf2:
3453 prefixes |= PREFIX_REPNZ;
3454 goto next_byte;
3455 case 0xf0:
3456 prefixes |= PREFIX_LOCK;
3457 goto next_byte;
3458 case 0x2e:
3459 s->override = R_CS;
3460 goto next_byte;
3461 case 0x36:
3462 s->override = R_SS;
3463 goto next_byte;
3464 case 0x3e:
3465 s->override = R_DS;
3466 goto next_byte;
3467 case 0x26:
3468 s->override = R_ES;
3469 goto next_byte;
3470 case 0x64:
3471 s->override = R_FS;
3472 goto next_byte;
3473 case 0x65:
3474 s->override = R_GS;
3475 goto next_byte;
3476 case 0x66:
3477 prefixes |= PREFIX_DATA;
3478 goto next_byte;
3479 case 0x67:
3480 prefixes |= PREFIX_ADR;
3481 goto next_byte;
3482 }
3483 if (prefixes & PREFIX_DATA)
3484 dflag ^= 1;
3485 if (prefixes & PREFIX_ADR)
3486 aflag ^= 1;
3487 }
3488
3489 s->prefix = prefixes;
3490 s->aflag = aflag;
3491 s->dflag = dflag;
3492
3493 /* lock generation */
3494#ifndef VBOX
3495 if (prefixes & PREFIX_LOCK)
3496 gen_op_lock();
3497#else /* VBOX */
3498 if (prefixes & PREFIX_LOCK) {
3499 if (is_invalid_lock_sequence(s, pc_start, b)) {
3500 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3501 return s->pc;
3502 }
3503 gen_op_lock();
3504 }
3505#endif /* VBOX */
3506
3507 /* now check op code */
3508 reswitch:
3509 switch(b) {
3510 case 0x0f:
3511 /**************************/
3512 /* extended op code */
3513 b = ldub_code(s->pc++) | 0x100;
3514 goto reswitch;
3515
3516 /**************************/
3517 /* arith & logic */
3518 case 0x00 ... 0x05:
3519 case 0x08 ... 0x0d:
3520 case 0x10 ... 0x15:
3521 case 0x18 ... 0x1d:
3522 case 0x20 ... 0x25:
3523 case 0x28 ... 0x2d:
3524 case 0x30 ... 0x35:
3525 case 0x38 ... 0x3d:
3526 {
3527 int op, f, val;
3528 op = (b >> 3) & 7;
3529 f = (b >> 1) & 3;
3530
3531 if ((b & 1) == 0)
3532 ot = OT_BYTE;
3533 else
3534 ot = dflag + OT_WORD;
3535
3536 switch(f) {
3537 case 0: /* OP Ev, Gv */
3538 modrm = ldub_code(s->pc++);
3539 reg = ((modrm >> 3) & 7) | rex_r;
3540 mod = (modrm >> 6) & 3;
3541 rm = (modrm & 7) | REX_B(s);
3542 if (mod != 3) {
3543 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3544 opreg = OR_TMP0;
3545 } else if (op == OP_XORL && rm == reg) {
3546 xor_zero:
3547 /* xor reg, reg optimisation */
3548 gen_op_movl_T0_0();
3549 s->cc_op = CC_OP_LOGICB + ot;
3550 gen_op_mov_reg_T0[ot][reg]();
3551 gen_op_update1_cc();
3552 break;
3553 } else {
3554 opreg = rm;
3555 }
3556 gen_op_mov_TN_reg[ot][1][reg]();
3557 gen_op(s, op, ot, opreg);
3558 break;
3559 case 1: /* OP Gv, Ev */
3560 modrm = ldub_code(s->pc++);
3561 mod = (modrm >> 6) & 3;
3562 reg = ((modrm >> 3) & 7) | rex_r;
3563 rm = (modrm & 7) | REX_B(s);
3564 if (mod != 3) {
3565 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3566 gen_op_ld_T1_A0[ot + s->mem_index]();
3567 } else if (op == OP_XORL && rm == reg) {
3568 goto xor_zero;
3569 } else {
3570 gen_op_mov_TN_reg[ot][1][rm]();
3571 }
3572 gen_op(s, op, ot, reg);
3573 break;
3574 case 2: /* OP A, Iv */
3575 val = insn_get(s, ot);
3576 gen_op_movl_T1_im(val);
3577 gen_op(s, op, ot, OR_EAX);
3578 break;
3579 }
3580 }
3581 break;
3582
3583 case 0x80: /* GRP1 */
3584 case 0x81:
3585 case 0x82:
3586 case 0x83:
3587 {
3588 int val;
3589
3590 if ((b & 1) == 0)
3591 ot = OT_BYTE;
3592 else
3593 ot = dflag + OT_WORD;
3594
3595 modrm = ldub_code(s->pc++);
3596 mod = (modrm >> 6) & 3;
3597 rm = (modrm & 7) | REX_B(s);
3598 op = (modrm >> 3) & 7;
3599
3600 if (mod != 3) {
3601 if (b == 0x83)
3602 s->rip_offset = 1;
3603 else
3604 s->rip_offset = insn_const_size(ot);
3605 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3606 opreg = OR_TMP0;
3607 } else {
3608 opreg = rm;
3609 }
3610
3611 switch(b) {
3612 default:
3613 case 0x80:
3614 case 0x81:
3615 case 0x82:
3616 val = insn_get(s, ot);
3617 break;
3618 case 0x83:
3619 val = (int8_t)insn_get(s, OT_BYTE);
3620 break;
3621 }
3622 gen_op_movl_T1_im(val);
3623 gen_op(s, op, ot, opreg);
3624 }
3625 break;
3626
3627 /**************************/
3628 /* inc, dec, and other misc arith */
3629 case 0x40 ... 0x47: /* inc Gv */
3630 ot = dflag ? OT_LONG : OT_WORD;
3631 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3632 break;
3633 case 0x48 ... 0x4f: /* dec Gv */
3634 ot = dflag ? OT_LONG : OT_WORD;
3635 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3636 break;
3637 case 0xf6: /* GRP3 */
3638 case 0xf7:
3639 if ((b & 1) == 0)
3640 ot = OT_BYTE;
3641 else
3642 ot = dflag + OT_WORD;
3643
3644 modrm = ldub_code(s->pc++);
3645 mod = (modrm >> 6) & 3;
3646 rm = (modrm & 7) | REX_B(s);
3647 op = (modrm >> 3) & 7;
3648 if (mod != 3) {
3649 if (op == 0)
3650 s->rip_offset = insn_const_size(ot);
3651 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3652 gen_op_ld_T0_A0[ot + s->mem_index]();
3653 } else {
3654 gen_op_mov_TN_reg[ot][0][rm]();
3655 }
3656
3657 switch(op) {
3658 case 0: /* test */
3659 val = insn_get(s, ot);
3660 gen_op_movl_T1_im(val);
3661 gen_op_testl_T0_T1_cc();
3662 s->cc_op = CC_OP_LOGICB + ot;
3663 break;
3664 case 2: /* not */
3665 gen_op_notl_T0();
3666 if (mod != 3) {
3667 gen_op_st_T0_A0[ot + s->mem_index]();
3668 } else {
3669 gen_op_mov_reg_T0[ot][rm]();
3670 }
3671 break;
3672 case 3: /* neg */
3673 gen_op_negl_T0();
3674 if (mod != 3) {
3675 gen_op_st_T0_A0[ot + s->mem_index]();
3676 } else {
3677 gen_op_mov_reg_T0[ot][rm]();
3678 }
3679 gen_op_update_neg_cc();
3680 s->cc_op = CC_OP_SUBB + ot;
3681 break;
3682 case 4: /* mul */
3683 switch(ot) {
3684 case OT_BYTE:
3685 gen_op_mulb_AL_T0();
3686 s->cc_op = CC_OP_MULB;
3687 break;
3688 case OT_WORD:
3689 gen_op_mulw_AX_T0();
3690 s->cc_op = CC_OP_MULW;
3691 break;
3692 default:
3693 case OT_LONG:
3694 gen_op_mull_EAX_T0();
3695 s->cc_op = CC_OP_MULL;
3696 break;
3697#ifdef TARGET_X86_64
3698 case OT_QUAD:
3699 gen_op_mulq_EAX_T0();
3700 s->cc_op = CC_OP_MULQ;
3701 break;
3702#endif
3703 }
3704 break;
3705 case 5: /* imul */
3706 switch(ot) {
3707 case OT_BYTE:
3708 gen_op_imulb_AL_T0();
3709 s->cc_op = CC_OP_MULB;
3710 break;
3711 case OT_WORD:
3712 gen_op_imulw_AX_T0();
3713 s->cc_op = CC_OP_MULW;
3714 break;
3715 default:
3716 case OT_LONG:
3717 gen_op_imull_EAX_T0();
3718 s->cc_op = CC_OP_MULL;
3719 break;
3720#ifdef TARGET_X86_64
3721 case OT_QUAD:
3722 gen_op_imulq_EAX_T0();
3723 s->cc_op = CC_OP_MULQ;
3724 break;
3725#endif
3726 }
3727 break;
3728 case 6: /* div */
3729 switch(ot) {
3730 case OT_BYTE:
3731 gen_jmp_im(pc_start - s->cs_base);
3732 gen_op_divb_AL_T0();
3733 break;
3734 case OT_WORD:
3735 gen_jmp_im(pc_start - s->cs_base);
3736 gen_op_divw_AX_T0();
3737 break;
3738 default:
3739 case OT_LONG:
3740 gen_jmp_im(pc_start - s->cs_base);
3741 gen_op_divl_EAX_T0();
3742 break;
3743#ifdef TARGET_X86_64
3744 case OT_QUAD:
3745 gen_jmp_im(pc_start - s->cs_base);
3746 gen_op_divq_EAX_T0();
3747 break;
3748#endif
3749 }
3750 break;
3751 case 7: /* idiv */
3752 switch(ot) {
3753 case OT_BYTE:
3754 gen_jmp_im(pc_start - s->cs_base);
3755 gen_op_idivb_AL_T0();
3756 break;
3757 case OT_WORD:
3758 gen_jmp_im(pc_start - s->cs_base);
3759 gen_op_idivw_AX_T0();
3760 break;
3761 default:
3762 case OT_LONG:
3763 gen_jmp_im(pc_start - s->cs_base);
3764 gen_op_idivl_EAX_T0();
3765 break;
3766#ifdef TARGET_X86_64
3767 case OT_QUAD:
3768 gen_jmp_im(pc_start - s->cs_base);
3769 gen_op_idivq_EAX_T0();
3770 break;
3771#endif
3772 }
3773 break;
3774 default:
3775 goto illegal_op;
3776 }
3777 break;
3778
3779 case 0xfe: /* GRP4 */
3780 case 0xff: /* GRP5 */
3781 if ((b & 1) == 0)
3782 ot = OT_BYTE;
3783 else
3784 ot = dflag + OT_WORD;
3785
3786 modrm = ldub_code(s->pc++);
3787 mod = (modrm >> 6) & 3;
3788 rm = (modrm & 7) | REX_B(s);
3789 op = (modrm >> 3) & 7;
3790 if (op >= 2 && b == 0xfe) {
3791 goto illegal_op;
3792 }
3793 if (CODE64(s)) {
3794 if (op == 2 || op == 4) {
3795 /* operand size for jumps is 64 bit */
3796 ot = OT_QUAD;
3797 } else if (op == 3 || op == 5) {
3798                    /* for far calls/jumps (lcall/ljmp), the operand is 16 or
3799                       32 bit, even in long mode */
3800 ot = dflag ? OT_LONG : OT_WORD;
3801 } else if (op == 6) {
3802 /* default push size is 64 bit */
3803 ot = dflag ? OT_QUAD : OT_WORD;
3804 }
3805 }
3806 if (mod != 3) {
3807 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3808 if (op >= 2 && op != 3 && op != 5)
3809 gen_op_ld_T0_A0[ot + s->mem_index]();
3810 } else {
3811 gen_op_mov_TN_reg[ot][0][rm]();
3812 }
3813
3814 switch(op) {
3815 case 0: /* inc Ev */
3816 if (mod != 3)
3817 opreg = OR_TMP0;
3818 else
3819 opreg = rm;
3820 gen_inc(s, ot, opreg, 1);
3821 break;
3822 case 1: /* dec Ev */
3823 if (mod != 3)
3824 opreg = OR_TMP0;
3825 else
3826 opreg = rm;
3827 gen_inc(s, ot, opreg, -1);
3828 break;
3829 case 2: /* call Ev */
3830 /* XXX: optimize if memory (no 'and' is necessary) */
3831#ifdef VBOX_WITH_CALL_RECORD
3832 if (s->record_call)
3833 gen_op_record_call();
3834#endif
3835 if (s->dflag == 0)
3836 gen_op_andl_T0_ffff();
3837 next_eip = s->pc - s->cs_base;
3838 gen_movtl_T1_im(next_eip);
3839 gen_push_T1(s);
3840 gen_op_jmp_T0();
3841 gen_eob(s);
3842 break;
3843 case 3: /* lcall Ev */
3844 gen_op_ld_T1_A0[ot + s->mem_index]();
3845 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3846 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3847 do_lcall:
3848 if (s->pe && !s->vm86) {
3849 if (s->cc_op != CC_OP_DYNAMIC)
3850 gen_op_set_cc_op(s->cc_op);
3851 gen_jmp_im(pc_start - s->cs_base);
3852 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3853 } else {
3854 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3855 }
3856 gen_eob(s);
3857 break;
3858 case 4: /* jmp Ev */
3859 if (s->dflag == 0)
3860 gen_op_andl_T0_ffff();
3861 gen_op_jmp_T0();
3862 gen_eob(s);
3863 break;
3864 case 5: /* ljmp Ev */
3865 gen_op_ld_T1_A0[ot + s->mem_index]();
3866 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3867 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3868 do_ljmp:
3869 if (s->pe && !s->vm86) {
3870 if (s->cc_op != CC_OP_DYNAMIC)
3871 gen_op_set_cc_op(s->cc_op);
3872 gen_jmp_im(pc_start - s->cs_base);
3873 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3874 } else {
3875 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3876 gen_op_movl_T0_T1();
3877 gen_op_jmp_T0();
3878 }
3879 gen_eob(s);
3880 break;
3881 case 6: /* push Ev */
3882 gen_push_T0(s);
3883 break;
3884 default:
3885 goto illegal_op;
3886 }
3887 break;
3888
3889 case 0x84: /* test Ev, Gv */
3890 case 0x85:
3891 if ((b & 1) == 0)
3892 ot = OT_BYTE;
3893 else
3894 ot = dflag + OT_WORD;
3895
3896 modrm = ldub_code(s->pc++);
3897 mod = (modrm >> 6) & 3;
3898 rm = (modrm & 7) | REX_B(s);
3899 reg = ((modrm >> 3) & 7) | rex_r;
3900
3901 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3902 gen_op_mov_TN_reg[ot][1][reg]();
3903 gen_op_testl_T0_T1_cc();
3904 s->cc_op = CC_OP_LOGICB + ot;
3905 break;
3906
3907 case 0xa8: /* test eAX, Iv */
3908 case 0xa9:
3909 if ((b & 1) == 0)
3910 ot = OT_BYTE;
3911 else
3912 ot = dflag + OT_WORD;
3913 val = insn_get(s, ot);
3914
3915 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3916 gen_op_movl_T1_im(val);
3917 gen_op_testl_T0_T1_cc();
3918 s->cc_op = CC_OP_LOGICB + ot;
3919 break;
3920
3921 case 0x98: /* CWDE/CBW */
3922#ifdef TARGET_X86_64
3923 if (dflag == 2) {
3924 gen_op_movslq_RAX_EAX();
3925 } else
3926#endif
3927 if (dflag == 1)
3928 gen_op_movswl_EAX_AX();
3929 else
3930 gen_op_movsbw_AX_AL();
3931 break;
3932 case 0x99: /* CDQ/CWD */
3933#ifdef TARGET_X86_64
3934 if (dflag == 2) {
3935 gen_op_movsqo_RDX_RAX();
3936 } else
3937#endif
3938 if (dflag == 1)
3939 gen_op_movslq_EDX_EAX();
3940 else
3941 gen_op_movswl_DX_AX();
3942 break;
3943 case 0x1af: /* imul Gv, Ev */
3944 case 0x69: /* imul Gv, Ev, I */
3945 case 0x6b:
3946 ot = dflag + OT_WORD;
3947 modrm = ldub_code(s->pc++);
3948 reg = ((modrm >> 3) & 7) | rex_r;
3949 if (b == 0x69)
3950 s->rip_offset = insn_const_size(ot);
3951 else if (b == 0x6b)
3952 s->rip_offset = 1;
3953 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3954 if (b == 0x69) {
3955 val = insn_get(s, ot);
3956 gen_op_movl_T1_im(val);
3957 } else if (b == 0x6b) {
3958 val = (int8_t)insn_get(s, OT_BYTE);
3959 gen_op_movl_T1_im(val);
3960 } else {
3961 gen_op_mov_TN_reg[ot][1][reg]();
3962 }
3963
3964#ifdef TARGET_X86_64
3965 if (ot == OT_QUAD) {
3966 gen_op_imulq_T0_T1();
3967 } else
3968#endif
3969 if (ot == OT_LONG) {
3970 gen_op_imull_T0_T1();
3971 } else {
3972 gen_op_imulw_T0_T1();
3973 }
3974 gen_op_mov_reg_T0[ot][reg]();
3975 s->cc_op = CC_OP_MULB + ot;
3976 break;
3977 case 0x1c0:
3978 case 0x1c1: /* xadd Ev, Gv */
3979 if ((b & 1) == 0)
3980 ot = OT_BYTE;
3981 else
3982 ot = dflag + OT_WORD;
3983 modrm = ldub_code(s->pc++);
3984 reg = ((modrm >> 3) & 7) | rex_r;
3985 mod = (modrm >> 6) & 3;
3986 if (mod == 3) {
3987 rm = (modrm & 7) | REX_B(s);
3988 gen_op_mov_TN_reg[ot][0][reg]();
3989 gen_op_mov_TN_reg[ot][1][rm]();
3990 gen_op_addl_T0_T1();
3991 gen_op_mov_reg_T1[ot][reg]();
3992 gen_op_mov_reg_T0[ot][rm]();
3993 } else {
3994 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3995 gen_op_mov_TN_reg[ot][0][reg]();
3996 gen_op_ld_T1_A0[ot + s->mem_index]();
3997 gen_op_addl_T0_T1();
3998 gen_op_st_T0_A0[ot + s->mem_index]();
3999 gen_op_mov_reg_T1[ot][reg]();
4000 }
4001 gen_op_update2_cc();
4002 s->cc_op = CC_OP_ADDB + ot;
4003 break;
4004 case 0x1b0:
4005 case 0x1b1: /* cmpxchg Ev, Gv */
4006 if ((b & 1) == 0)
4007 ot = OT_BYTE;
4008 else
4009 ot = dflag + OT_WORD;
4010 modrm = ldub_code(s->pc++);
4011 reg = ((modrm >> 3) & 7) | rex_r;
4012 mod = (modrm >> 6) & 3;
4013 gen_op_mov_TN_reg[ot][1][reg]();
4014 if (mod == 3) {
4015 rm = (modrm & 7) | REX_B(s);
4016 gen_op_mov_TN_reg[ot][0][rm]();
4017 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
4018 gen_op_mov_reg_T0[ot][rm]();
4019 } else {
4020 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4021 gen_op_ld_T0_A0[ot + s->mem_index]();
4022 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
4023 }
4024 s->cc_op = CC_OP_SUBB + ot;
4025 break;
4026 case 0x1c7: /* cmpxchg8b */
4027 modrm = ldub_code(s->pc++);
4028 mod = (modrm >> 6) & 3;
4029 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4030 goto illegal_op;
4031 if (s->cc_op != CC_OP_DYNAMIC)
4032 gen_op_set_cc_op(s->cc_op);
4033 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4034 gen_op_cmpxchg8b();
4035 s->cc_op = CC_OP_EFLAGS;
4036 break;
4037
4038 /**************************/
4039 /* push/pop */
4040 case 0x50 ... 0x57: /* push */
4041 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
4042 gen_push_T0(s);
4043 break;
4044 case 0x58 ... 0x5f: /* pop */
4045 if (CODE64(s)) {
4046 ot = dflag ? OT_QUAD : OT_WORD;
4047 } else {
4048 ot = dflag + OT_WORD;
4049 }
4050 gen_pop_T0(s);
4051 /* NOTE: order is important for pop %sp */
4052 gen_pop_update(s);
4053 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
4054 break;
4055 case 0x60: /* pusha */
4056 if (CODE64(s))
4057 goto illegal_op;
4058 gen_pusha(s);
4059 break;
4060 case 0x61: /* popa */
4061 if (CODE64(s))
4062 goto illegal_op;
4063 gen_popa(s);
4064 break;
4065 case 0x68: /* push Iv */
4066 case 0x6a:
4067 if (CODE64(s)) {
4068 ot = dflag ? OT_QUAD : OT_WORD;
4069 } else {
4070 ot = dflag + OT_WORD;
4071 }
4072 if (b == 0x68)
4073 val = insn_get(s, ot);
4074 else
4075 val = (int8_t)insn_get(s, OT_BYTE);
4076 gen_op_movl_T0_im(val);
4077 gen_push_T0(s);
4078 break;
4079 case 0x8f: /* pop Ev */
4080 if (CODE64(s)) {
4081 ot = dflag ? OT_QUAD : OT_WORD;
4082 } else {
4083 ot = dflag + OT_WORD;
4084 }
4085 modrm = ldub_code(s->pc++);
4086 mod = (modrm >> 6) & 3;
4087 gen_pop_T0(s);
4088 if (mod == 3) {
4089 /* NOTE: order is important for pop %sp */
4090 gen_pop_update(s);
4091 rm = (modrm & 7) | REX_B(s);
4092 gen_op_mov_reg_T0[ot][rm]();
4093 } else {
4094 /* NOTE: order is important too for MMU exceptions */
4095 s->popl_esp_hack = 1 << ot;
4096 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4097 s->popl_esp_hack = 0;
4098 gen_pop_update(s);
4099 }
4100 break;
4101 case 0xc8: /* enter */
4102 {
4103 int level;
4104 val = lduw_code(s->pc);
4105 s->pc += 2;
4106 level = ldub_code(s->pc++);
4107 gen_enter(s, val, level);
4108 }
4109 break;
4110 case 0xc9: /* leave */
4111 /* XXX: exception not precise (ESP is updated before potential exception) */
4112 if (CODE64(s)) {
4113 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
4114 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
4115 } else if (s->ss32) {
4116 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
4117 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
4118 } else {
4119 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
4120 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
4121 }
4122 gen_pop_T0(s);
4123 if (CODE64(s)) {
4124 ot = dflag ? OT_QUAD : OT_WORD;
4125 } else {
4126 ot = dflag + OT_WORD;
4127 }
4128 gen_op_mov_reg_T0[ot][R_EBP]();
4129 gen_pop_update(s);
4130 break;
4131 case 0x06: /* push es */
4132 case 0x0e: /* push cs */
4133 case 0x16: /* push ss */
4134 case 0x1e: /* push ds */
4135 if (CODE64(s))
4136 goto illegal_op;
4137 gen_op_movl_T0_seg(b >> 3);
4138 gen_push_T0(s);
4139 break;
4140 case 0x1a0: /* push fs */
4141 case 0x1a8: /* push gs */
4142 gen_op_movl_T0_seg((b >> 3) & 7);
4143 gen_push_T0(s);
4144 break;
4145 case 0x07: /* pop es */
4146 case 0x17: /* pop ss */
4147 case 0x1f: /* pop ds */
4148 if (CODE64(s))
4149 goto illegal_op;
4150 reg = b >> 3;
4151 gen_pop_T0(s);
4152 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4153 gen_pop_update(s);
4154 if (reg == R_SS) {
4155 /* if reg == SS, inhibit interrupts/trace. */
4156 /* If several instructions disable interrupts, only the
4157 _first_ does it */
4158 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4159 gen_op_set_inhibit_irq();
4160 s->tf = 0;
4161 }
4162 if (s->is_jmp) {
4163 gen_jmp_im(s->pc - s->cs_base);
4164 gen_eob(s);
4165 }
4166 break;
4167 case 0x1a1: /* pop fs */
4168 case 0x1a9: /* pop gs */
4169 gen_pop_T0(s);
4170 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4171 gen_pop_update(s);
4172 if (s->is_jmp) {
4173 gen_jmp_im(s->pc - s->cs_base);
4174 gen_eob(s);
4175 }
4176 break;
4177
4178 /**************************/
4179 /* mov */
4180 case 0x88:
4181 case 0x89: /* mov Gv, Ev */
4182 if ((b & 1) == 0)
4183 ot = OT_BYTE;
4184 else
4185 ot = dflag + OT_WORD;
4186 modrm = ldub_code(s->pc++);
4187 reg = ((modrm >> 3) & 7) | rex_r;
4188
4189 /* generate a generic store */
4190 gen_ldst_modrm(s, modrm, ot, reg, 1);
4191 break;
4192 case 0xc6:
4193 case 0xc7: /* mov Ev, Iv */
4194 if ((b & 1) == 0)
4195 ot = OT_BYTE;
4196 else
4197 ot = dflag + OT_WORD;
4198 modrm = ldub_code(s->pc++);
4199 mod = (modrm >> 6) & 3;
4200 if (mod != 3) {
4201 s->rip_offset = insn_const_size(ot);
4202 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4203 }
4204 val = insn_get(s, ot);
4205 gen_op_movl_T0_im(val);
4206 if (mod != 3)
4207 gen_op_st_T0_A0[ot + s->mem_index]();
4208 else
4209 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
4210 break;
4211 case 0x8a:
4212 case 0x8b: /* mov Ev, Gv */
4213#ifdef VBOX /* dtrace hot fix */
4214 if (prefixes & PREFIX_LOCK)
4215 goto illegal_op;
4216#endif
4217 if ((b & 1) == 0)
4218 ot = OT_BYTE;
4219 else
4220 ot = OT_WORD + dflag;
4221 modrm = ldub_code(s->pc++);
4222 reg = ((modrm >> 3) & 7) | rex_r;
4223
4224 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4225 gen_op_mov_reg_T0[ot][reg]();
4226 break;
4227 case 0x8e: /* mov seg, Gv */
4228 modrm = ldub_code(s->pc++);
4229 reg = (modrm >> 3) & 7;
4230 if (reg >= 6 || reg == R_CS)
4231 goto illegal_op;
4232 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4233 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4234 if (reg == R_SS) {
4235 /* if reg == SS, inhibit interrupts/trace */
4236 /* If several instructions disable interrupts, only the
4237 _first_ does it */
4238 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4239 gen_op_set_inhibit_irq();
4240 s->tf = 0;
4241 }
4242 if (s->is_jmp) {
4243 gen_jmp_im(s->pc - s->cs_base);
4244 gen_eob(s);
4245 }
4246 break;
4247 case 0x8c: /* mov Gv, seg */
4248 modrm = ldub_code(s->pc++);
4249 reg = (modrm >> 3) & 7;
4250 mod = (modrm >> 6) & 3;
4251 if (reg >= 6)
4252 goto illegal_op;
4253 gen_op_movl_T0_seg(reg);
4254 if (mod == 3)
4255 ot = OT_WORD + dflag;
4256 else
4257 ot = OT_WORD;
4258 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4259 break;
4260
4261 case 0x1b6: /* movzbS Gv, Eb */
4262    case 0x1b7: /* movzwS Gv, Ew */
4263 case 0x1be: /* movsbS Gv, Eb */
4264    case 0x1bf: /* movswS Gv, Ew */
4265 {
4266 int d_ot;
4267 /* d_ot is the size of destination */
4268 d_ot = dflag + OT_WORD;
4269 /* ot is the size of source */
4270 ot = (b & 1) + OT_BYTE;
4271 modrm = ldub_code(s->pc++);
4272 reg = ((modrm >> 3) & 7) | rex_r;
4273 mod = (modrm >> 6) & 3;
4274 rm = (modrm & 7) | REX_B(s);
4275
4276 if (mod == 3) {
4277 gen_op_mov_TN_reg[ot][0][rm]();
4278 switch(ot | (b & 8)) {
4279 case OT_BYTE:
4280 gen_op_movzbl_T0_T0();
4281 break;
4282 case OT_BYTE | 8:
4283 gen_op_movsbl_T0_T0();
4284 break;
4285 case OT_WORD:
4286 gen_op_movzwl_T0_T0();
4287 break;
4288 default:
4289 case OT_WORD | 8:
4290 gen_op_movswl_T0_T0();
4291 break;
4292 }
4293 gen_op_mov_reg_T0[d_ot][reg]();
4294 } else {
4295 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4296 if (b & 8) {
4297 gen_op_lds_T0_A0[ot + s->mem_index]();
4298 } else {
4299 gen_op_ldu_T0_A0[ot + s->mem_index]();
4300 }
4301 gen_op_mov_reg_T0[d_ot][reg]();
4302 }
4303 }
4304 break;
4305
4306 case 0x8d: /* lea */
4307 ot = dflag + OT_WORD;
4308 modrm = ldub_code(s->pc++);
4309 mod = (modrm >> 6) & 3;
4310 if (mod == 3)
4311 goto illegal_op;
4312 reg = ((modrm >> 3) & 7) | rex_r;
4313 /* we must ensure that no segment is added */
4314 s->override = -1;
4315 val = s->addseg;
4316 s->addseg = 0;
4317 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4318 s->addseg = val;
4319 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4320 break;
4321
4322 case 0xa0: /* mov EAX, Ov */
4323 case 0xa1:
4324 case 0xa2: /* mov Ov, EAX */
4325 case 0xa3:
4326 {
4327 target_ulong offset_addr;
4328
4329 if ((b & 1) == 0)
4330 ot = OT_BYTE;
4331 else
4332 ot = dflag + OT_WORD;
4333#ifdef TARGET_X86_64
4334 if (s->aflag == 2) {
4335 offset_addr = ldq_code(s->pc);
4336 s->pc += 8;
4337 if (offset_addr == (int32_t)offset_addr)
4338 gen_op_movq_A0_im(offset_addr);
4339 else
4340 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4341 } else
4342#endif
4343 {
4344 if (s->aflag) {
4345 offset_addr = insn_get(s, OT_LONG);
4346 } else {
4347 offset_addr = insn_get(s, OT_WORD);
4348 }
4349 gen_op_movl_A0_im(offset_addr);
4350 }
4351 gen_add_A0_ds_seg(s);
4352 if ((b & 2) == 0) {
4353 gen_op_ld_T0_A0[ot + s->mem_index]();
4354 gen_op_mov_reg_T0[ot][R_EAX]();
4355 } else {
4356 gen_op_mov_TN_reg[ot][0][R_EAX]();
4357 gen_op_st_T0_A0[ot + s->mem_index]();
4358 }
4359 }
4360 break;
4361 case 0xd7: /* xlat */
4362#ifdef TARGET_X86_64
4363 if (s->aflag == 2) {
4364 gen_op_movq_A0_reg[R_EBX]();
4365 gen_op_addq_A0_AL();
4366 } else
4367#endif
4368 {
4369 gen_op_movl_A0_reg[R_EBX]();
4370 gen_op_addl_A0_AL();
4371 if (s->aflag == 0)
4372 gen_op_andl_A0_ffff();
4373 }
4374 gen_add_A0_ds_seg(s);
4375 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4376 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4377 break;
4378 case 0xb0 ... 0xb7: /* mov R, Ib */
4379 val = insn_get(s, OT_BYTE);
4380 gen_op_movl_T0_im(val);
4381 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4382 break;
4383 case 0xb8 ... 0xbf: /* mov R, Iv */
4384#ifdef TARGET_X86_64
4385 if (dflag == 2) {
4386 uint64_t tmp;
4387 /* 64 bit case */
4388 tmp = ldq_code(s->pc);
4389 s->pc += 8;
4390 reg = (b & 7) | REX_B(s);
4391 gen_movtl_T0_im(tmp);
4392 gen_op_mov_reg_T0[OT_QUAD][reg]();
4393 } else
4394#endif
4395 {
4396 ot = dflag ? OT_LONG : OT_WORD;
4397 val = insn_get(s, ot);
4398 reg = (b & 7) | REX_B(s);
4399 gen_op_movl_T0_im(val);
4400 gen_op_mov_reg_T0[ot][reg]();
4401 }
4402 break;
4403
4404 case 0x91 ... 0x97: /* xchg R, EAX */
4405 ot = dflag + OT_WORD;
4406 reg = (b & 7) | REX_B(s);
4407 rm = R_EAX;
4408 goto do_xchg_reg;
4409 case 0x86:
4410 case 0x87: /* xchg Ev, Gv */
4411 if ((b & 1) == 0)
4412 ot = OT_BYTE;
4413 else
4414 ot = dflag + OT_WORD;
4415 modrm = ldub_code(s->pc++);
4416 reg = ((modrm >> 3) & 7) | rex_r;
4417 mod = (modrm >> 6) & 3;
4418 if (mod == 3) {
4419 rm = (modrm & 7) | REX_B(s);
4420 do_xchg_reg:
4421 gen_op_mov_TN_reg[ot][0][reg]();
4422 gen_op_mov_TN_reg[ot][1][rm]();
4423 gen_op_mov_reg_T0[ot][rm]();
4424 gen_op_mov_reg_T1[ot][reg]();
4425 } else {
4426 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4427 gen_op_mov_TN_reg[ot][0][reg]();
4428 /* for xchg, lock is implicit */
4429 if (!(prefixes & PREFIX_LOCK))
4430 gen_op_lock();
4431 gen_op_ld_T1_A0[ot + s->mem_index]();
4432 gen_op_st_T0_A0[ot + s->mem_index]();
4433 if (!(prefixes & PREFIX_LOCK))
4434 gen_op_unlock();
4435 gen_op_mov_reg_T1[ot][reg]();
4436 }
4437 break;
4438 case 0xc4: /* les Gv */
4439 if (CODE64(s))
4440 goto illegal_op;
4441 op = R_ES;
4442 goto do_lxx;
4443 case 0xc5: /* lds Gv */
4444 if (CODE64(s))
4445 goto illegal_op;
4446 op = R_DS;
4447 goto do_lxx;
4448 case 0x1b2: /* lss Gv */
4449 op = R_SS;
4450 goto do_lxx;
4451 case 0x1b4: /* lfs Gv */
4452 op = R_FS;
4453 goto do_lxx;
4454 case 0x1b5: /* lgs Gv */
4455 op = R_GS;
4456 do_lxx:
4457 ot = dflag ? OT_LONG : OT_WORD;
4458 modrm = ldub_code(s->pc++);
4459 reg = ((modrm >> 3) & 7) | rex_r;
4460 mod = (modrm >> 6) & 3;
4461 if (mod == 3)
4462 goto illegal_op;
4463 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4464 gen_op_ld_T1_A0[ot + s->mem_index]();
4465 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4466 /* load the segment first to handle exceptions properly */
4467 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4468 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4469 /* then put the data */
4470 gen_op_mov_reg_T1[ot][reg]();
4471 if (s->is_jmp) {
4472 gen_jmp_im(s->pc - s->cs_base);
4473 gen_eob(s);
4474 }
4475 break;
4476
4477 /************************/
4478 /* shifts */
4479 case 0xc0:
4480 case 0xc1:
4481 /* shift Ev,Ib */
4482 shift = 2;
4483 grp2:
4484 {
4485 if ((b & 1) == 0)
4486 ot = OT_BYTE;
4487 else
4488 ot = dflag + OT_WORD;
4489
4490 modrm = ldub_code(s->pc++);
4491 mod = (modrm >> 6) & 3;
4492 op = (modrm >> 3) & 7;
4493
4494 if (mod != 3) {
4495 if (shift == 2) {
4496 s->rip_offset = 1;
4497 }
4498 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4499 opreg = OR_TMP0;
4500 } else {
4501 opreg = (modrm & 7) | REX_B(s);
4502 }
4503
4504 /* simpler op */
4505 if (shift == 0) {
4506 gen_shift(s, op, ot, opreg, OR_ECX);
4507 } else {
4508 if (shift == 2) {
4509 shift = ldub_code(s->pc++);
4510 }
4511 gen_shifti(s, op, ot, opreg, shift);
4512 }
4513 }
4514 break;
4515 case 0xd0:
4516 case 0xd1:
4517 /* shift Ev,1 */
4518 shift = 1;
4519 goto grp2;
4520 case 0xd2:
4521 case 0xd3:
4522 /* shift Ev,cl */
4523 shift = 0;
4524 goto grp2;
4525
4526 case 0x1a4: /* shld imm */
4527 op = 0;
4528 shift = 1;
4529 goto do_shiftd;
4530 case 0x1a5: /* shld cl */
4531 op = 0;
4532 shift = 0;
4533 goto do_shiftd;
4534 case 0x1ac: /* shrd imm */
4535 op = 1;
4536 shift = 1;
4537 goto do_shiftd;
4538 case 0x1ad: /* shrd cl */
4539 op = 1;
4540 shift = 0;
4541 do_shiftd:
4542 ot = dflag + OT_WORD;
4543 modrm = ldub_code(s->pc++);
4544 mod = (modrm >> 6) & 3;
4545 rm = (modrm & 7) | REX_B(s);
4546 reg = ((modrm >> 3) & 7) | rex_r;
4547
4548 if (mod != 3) {
4549 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4550 gen_op_ld_T0_A0[ot + s->mem_index]();
4551 } else {
4552 gen_op_mov_TN_reg[ot][0][rm]();
4553 }
4554 gen_op_mov_TN_reg[ot][1][reg]();
4555
4556 if (shift) {
4557 val = ldub_code(s->pc++);
4558 if (ot == OT_QUAD)
4559 val &= 0x3f;
4560 else
4561 val &= 0x1f;
4562 if (val) {
4563 if (mod == 3)
4564 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4565 else
4566 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4567 if (op == 0 && ot != OT_WORD)
4568 s->cc_op = CC_OP_SHLB + ot;
4569 else
4570 s->cc_op = CC_OP_SARB + ot;
4571 }
4572 } else {
4573 if (s->cc_op != CC_OP_DYNAMIC)
4574 gen_op_set_cc_op(s->cc_op);
4575 if (mod == 3)
4576 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4577 else
4578 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4579 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4580 }
4581 if (mod == 3) {
4582 gen_op_mov_reg_T0[ot][rm]();
4583 }
4584 break;
4585
4586 /************************/
4587 /* floats */
4588 case 0xd8 ... 0xdf:
4589 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4590 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4591 /* XXX: what to do if illegal op ? */
4592 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4593 break;
4594 }
4595 modrm = ldub_code(s->pc++);
4596 mod = (modrm >> 6) & 3;
4597 rm = modrm & 7;
4598 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4599 if (mod != 3) {
4600 /* memory op */
4601 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4602 switch(op) {
4603 case 0x00 ... 0x07: /* fxxxs */
4604 case 0x10 ... 0x17: /* fixxxl */
4605 case 0x20 ... 0x27: /* fxxxl */
4606 case 0x30 ... 0x37: /* fixxx */
4607 {
4608 int op1;
4609 op1 = op & 7;
4610
4611 switch(op >> 4) {
4612 case 0:
4613 gen_op_flds_FT0_A0();
4614 break;
4615 case 1:
4616 gen_op_fildl_FT0_A0();
4617 break;
4618 case 2:
4619 gen_op_fldl_FT0_A0();
4620 break;
4621 case 3:
4622 default:
4623 gen_op_fild_FT0_A0();
4624 break;
4625 }
4626
4627 gen_op_fp_arith_ST0_FT0[op1]();
4628 if (op1 == 3) {
4629 /* fcomp needs pop */
4630 gen_op_fpop();
4631 }
4632 }
4633 break;
4634 case 0x08: /* flds */
4635 case 0x0a: /* fsts */
4636 case 0x0b: /* fstps */
4637 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4638 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4639 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4640 switch(op & 7) {
4641 case 0:
4642 switch(op >> 4) {
4643 case 0:
4644 gen_op_flds_ST0_A0();
4645 break;
4646 case 1:
4647 gen_op_fildl_ST0_A0();
4648 break;
4649 case 2:
4650 gen_op_fldl_ST0_A0();
4651 break;
4652 case 3:
4653 default:
4654 gen_op_fild_ST0_A0();
4655 break;
4656 }
4657 break;
4658 case 1:
4659 switch(op >> 4) {
4660 case 1:
4661 gen_op_fisttl_ST0_A0();
4662 break;
4663 case 2:
4664 gen_op_fisttll_ST0_A0();
4665 break;
4666 case 3:
4667 default:
4668 gen_op_fistt_ST0_A0();
4669 }
4670 gen_op_fpop();
4671 break;
4672 default:
4673 switch(op >> 4) {
4674 case 0:
4675 gen_op_fsts_ST0_A0();
4676 break;
4677 case 1:
4678 gen_op_fistl_ST0_A0();
4679 break;
4680 case 2:
4681 gen_op_fstl_ST0_A0();
4682 break;
4683 case 3:
4684 default:
4685 gen_op_fist_ST0_A0();
4686 break;
4687 }
4688 if ((op & 7) == 3)
4689 gen_op_fpop();
4690 break;
4691 }
4692 break;
4693 case 0x0c: /* fldenv mem */
4694 gen_op_fldenv_A0(s->dflag);
4695 break;
4696 case 0x0d: /* fldcw mem */
4697 gen_op_fldcw_A0();
4698 break;
4699 case 0x0e: /* fnstenv mem */
4700 gen_op_fnstenv_A0(s->dflag);
4701 break;
4702 case 0x0f: /* fnstcw mem */
4703 gen_op_fnstcw_A0();
4704 break;
4705 case 0x1d: /* fldt mem */
4706 gen_op_fldt_ST0_A0();
4707 break;
4708 case 0x1f: /* fstpt mem */
4709 gen_op_fstt_ST0_A0();
4710 gen_op_fpop();
4711 break;
4712 case 0x2c: /* frstor mem */
4713 gen_op_frstor_A0(s->dflag);
4714 break;
4715 case 0x2e: /* fnsave mem */
4716 gen_op_fnsave_A0(s->dflag);
4717 break;
4718 case 0x2f: /* fnstsw mem */
4719 gen_op_fnstsw_A0();
4720 break;
4721 case 0x3c: /* fbld */
4722 gen_op_fbld_ST0_A0();
4723 break;
4724 case 0x3e: /* fbstp */
4725 gen_op_fbst_ST0_A0();
4726 gen_op_fpop();
4727 break;
4728 case 0x3d: /* fildll */
4729 gen_op_fildll_ST0_A0();
4730 break;
4731 case 0x3f: /* fistpll */
4732 gen_op_fistll_ST0_A0();
4733 gen_op_fpop();
4734 break;
4735 default:
4736 goto illegal_op;
4737 }
4738 } else {
4739 /* register float ops */
4740 opreg = rm;
4741
4742 switch(op) {
4743 case 0x08: /* fld sti */
4744 gen_op_fpush();
4745 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4746 break;
4747 case 0x09: /* fxchg sti */
4748 case 0x29: /* fxchg4 sti, undocumented op */
4749 case 0x39: /* fxchg7 sti, undocumented op */
4750 gen_op_fxchg_ST0_STN(opreg);
4751 break;
4752 case 0x0a: /* grp d9/2 */
4753 switch(rm) {
4754 case 0: /* fnop */
4755 /* check exceptions (FreeBSD FPU probe) */
4756 if (s->cc_op != CC_OP_DYNAMIC)
4757 gen_op_set_cc_op(s->cc_op);
4758 gen_jmp_im(pc_start - s->cs_base);
4759 gen_op_fwait();
4760 break;
4761 default:
4762 goto illegal_op;
4763 }
4764 break;
4765 case 0x0c: /* grp d9/4 */
4766 switch(rm) {
4767 case 0: /* fchs */
4768 gen_op_fchs_ST0();
4769 break;
4770 case 1: /* fabs */
4771 gen_op_fabs_ST0();
4772 break;
4773 case 4: /* ftst */
4774 gen_op_fldz_FT0();
4775 gen_op_fcom_ST0_FT0();
4776 break;
4777 case 5: /* fxam */
4778 gen_op_fxam_ST0();
4779 break;
4780 default:
4781 goto illegal_op;
4782 }
4783 break;
4784 case 0x0d: /* grp d9/5 */
4785 {
4786 switch(rm) {
4787 case 0:
4788 gen_op_fpush();
4789 gen_op_fld1_ST0();
4790 break;
4791 case 1:
4792 gen_op_fpush();
4793 gen_op_fldl2t_ST0();
4794 break;
4795 case 2:
4796 gen_op_fpush();
4797 gen_op_fldl2e_ST0();
4798 break;
4799 case 3:
4800 gen_op_fpush();
4801 gen_op_fldpi_ST0();
4802 break;
4803 case 4:
4804 gen_op_fpush();
4805 gen_op_fldlg2_ST0();
4806 break;
4807 case 5:
4808 gen_op_fpush();
4809 gen_op_fldln2_ST0();
4810 break;
4811 case 6:
4812 gen_op_fpush();
4813 gen_op_fldz_ST0();
4814 break;
4815 default:
4816 goto illegal_op;
4817 }
4818 }
4819 break;
4820 case 0x0e: /* grp d9/6 */
4821 switch(rm) {
4822 case 0: /* f2xm1 */
4823 gen_op_f2xm1();
4824 break;
4825 case 1: /* fyl2x */
4826 gen_op_fyl2x();
4827 break;
4828 case 2: /* fptan */
4829 gen_op_fptan();
4830 break;
4831 case 3: /* fpatan */
4832 gen_op_fpatan();
4833 break;
4834 case 4: /* fxtract */
4835 gen_op_fxtract();
4836 break;
4837 case 5: /* fprem1 */
4838 gen_op_fprem1();
4839 break;
4840 case 6: /* fdecstp */
4841 gen_op_fdecstp();
4842 break;
4843 default:
4844 case 7: /* fincstp */
4845 gen_op_fincstp();
4846 break;
4847 }
4848 break;
4849 case 0x0f: /* grp d9/7 */
4850 switch(rm) {
4851 case 0: /* fprem */
4852 gen_op_fprem();
4853 break;
4854 case 1: /* fyl2xp1 */
4855 gen_op_fyl2xp1();
4856 break;
4857 case 2: /* fsqrt */
4858 gen_op_fsqrt();
4859 break;
4860 case 3: /* fsincos */
4861 gen_op_fsincos();
4862 break;
4863 case 5: /* fscale */
4864 gen_op_fscale();
4865 break;
4866 case 4: /* frndint */
4867 gen_op_frndint();
4868 break;
4869 case 6: /* fsin */
4870 gen_op_fsin();
4871 break;
4872 default:
4873 case 7: /* fcos */
4874 gen_op_fcos();
4875 break;
4876 }
4877 break;
4878 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4879 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4880 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4881 {
4882 int op1;
4883
4884 op1 = op & 7;
4885 if (op >= 0x20) {
4886 gen_op_fp_arith_STN_ST0[op1](opreg);
4887 if (op >= 0x30)
4888 gen_op_fpop();
4889 } else {
4890 gen_op_fmov_FT0_STN(opreg);
4891 gen_op_fp_arith_ST0_FT0[op1]();
4892 }
4893 }
4894 break;
4895 case 0x02: /* fcom */
4896 case 0x22: /* fcom2, undocumented op */
4897 gen_op_fmov_FT0_STN(opreg);
4898 gen_op_fcom_ST0_FT0();
4899 break;
4900 case 0x03: /* fcomp */
4901 case 0x23: /* fcomp3, undocumented op */
4902 case 0x32: /* fcomp5, undocumented op */
4903 gen_op_fmov_FT0_STN(opreg);
4904 gen_op_fcom_ST0_FT0();
4905 gen_op_fpop();
4906 break;
4907 case 0x15: /* da/5 */
4908 switch(rm) {
4909 case 1: /* fucompp */
4910 gen_op_fmov_FT0_STN(1);
4911 gen_op_fucom_ST0_FT0();
4912 gen_op_fpop();
4913 gen_op_fpop();
4914 break;
4915 default:
4916 goto illegal_op;
4917 }
4918 break;
4919 case 0x1c:
4920 switch(rm) {
4921 case 0: /* feni (287 only, just do nop here) */
4922 break;
4923 case 1: /* fdisi (287 only, just do nop here) */
4924 break;
4925 case 2: /* fclex */
4926 gen_op_fclex();
4927 break;
4928 case 3: /* fninit */
4929 gen_op_fninit();
4930 break;
4931 case 4: /* fsetpm (287 only, just do nop here) */
4932 break;
4933 default:
4934 goto illegal_op;
4935 }
4936 break;
4937 case 0x1d: /* fucomi */
4938 if (s->cc_op != CC_OP_DYNAMIC)
4939 gen_op_set_cc_op(s->cc_op);
4940 gen_op_fmov_FT0_STN(opreg);
4941 gen_op_fucomi_ST0_FT0();
4942 s->cc_op = CC_OP_EFLAGS;
4943 break;
4944 case 0x1e: /* fcomi */
4945 if (s->cc_op != CC_OP_DYNAMIC)
4946 gen_op_set_cc_op(s->cc_op);
4947 gen_op_fmov_FT0_STN(opreg);
4948 gen_op_fcomi_ST0_FT0();
4949 s->cc_op = CC_OP_EFLAGS;
4950 break;
4951 case 0x28: /* ffree sti */
4952 gen_op_ffree_STN(opreg);
4953 break;
4954 case 0x2a: /* fst sti */
4955 gen_op_fmov_STN_ST0(opreg);
4956 break;
4957 case 0x2b: /* fstp sti */
4958 case 0x0b: /* fstp1 sti, undocumented op */
4959 case 0x3a: /* fstp8 sti, undocumented op */
4960 case 0x3b: /* fstp9 sti, undocumented op */
4961 gen_op_fmov_STN_ST0(opreg);
4962 gen_op_fpop();
4963 break;
4964 case 0x2c: /* fucom st(i) */
4965 gen_op_fmov_FT0_STN(opreg);
4966 gen_op_fucom_ST0_FT0();
4967 break;
4968 case 0x2d: /* fucomp st(i) */
4969 gen_op_fmov_FT0_STN(opreg);
4970 gen_op_fucom_ST0_FT0();
4971 gen_op_fpop();
4972 break;
4973 case 0x33: /* de/3 */
4974 switch(rm) {
4975 case 1: /* fcompp */
4976 gen_op_fmov_FT0_STN(1);
4977 gen_op_fcom_ST0_FT0();
4978 gen_op_fpop();
4979 gen_op_fpop();
4980 break;
4981 default:
4982 goto illegal_op;
4983 }
4984 break;
4985 case 0x38: /* ffreep sti, undocumented op */
4986 gen_op_ffree_STN(opreg);
4987 gen_op_fpop();
4988 break;
4989 case 0x3c: /* df/4 */
4990 switch(rm) {
4991 case 0:
4992 gen_op_fnstsw_EAX();
4993 break;
4994 default:
4995 goto illegal_op;
4996 }
4997 break;
4998 case 0x3d: /* fucomip */
4999 if (s->cc_op != CC_OP_DYNAMIC)
5000 gen_op_set_cc_op(s->cc_op);
5001 gen_op_fmov_FT0_STN(opreg);
5002 gen_op_fucomi_ST0_FT0();
5003 gen_op_fpop();
5004 s->cc_op = CC_OP_EFLAGS;
5005 break;
5006 case 0x3e: /* fcomip */
5007 if (s->cc_op != CC_OP_DYNAMIC)
5008 gen_op_set_cc_op(s->cc_op);
5009 gen_op_fmov_FT0_STN(opreg);
5010 gen_op_fcomi_ST0_FT0();
5011 gen_op_fpop();
5012 s->cc_op = CC_OP_EFLAGS;
5013 break;
5014 case 0x10 ... 0x13: /* fcmovxx */
5015 case 0x18 ... 0x1b:
5016 {
5017 int op1;
5018 const static uint8_t fcmov_cc[8] = {
5019 (JCC_B << 1),
5020 (JCC_Z << 1),
5021 (JCC_BE << 1),
5022 (JCC_P << 1),
5023 };
5024 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5025 gen_setcc(s, op1);
5026 gen_op_fcmov_ST0_STN_T0(opreg);
5027 }
5028 break;
5029 default:
5030 goto illegal_op;
5031 }
5032 }
5033#ifdef USE_CODE_COPY
5034 s->tb->cflags |= CF_TB_FP_USED;
5035#endif
5036 break;
5037 /************************/
5038 /* string ops */
5039
5040 case 0xa4: /* movsS */
5041 case 0xa5:
5042 if ((b & 1) == 0)
5043 ot = OT_BYTE;
5044 else
5045 ot = dflag + OT_WORD;
5046
5047 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5048 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5049 } else {
5050 gen_movs(s, ot);
5051 }
5052 break;
5053
5054 case 0xaa: /* stosS */
5055 case 0xab:
5056 if ((b & 1) == 0)
5057 ot = OT_BYTE;
5058 else
5059 ot = dflag + OT_WORD;
5060
5061 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5062 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5063 } else {
5064 gen_stos(s, ot);
5065 }
5066 break;
5067 case 0xac: /* lodsS */
5068 case 0xad:
5069 if ((b & 1) == 0)
5070 ot = OT_BYTE;
5071 else
5072 ot = dflag + OT_WORD;
5073 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5074 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5075 } else {
5076 gen_lods(s, ot);
5077 }
5078 break;
5079 case 0xae: /* scasS */
5080 case 0xaf:
5081 if ((b & 1) == 0)
5082 ot = OT_BYTE;
5083 else
5084 ot = dflag + OT_WORD;
5085 if (prefixes & PREFIX_REPNZ) {
5086 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5087 } else if (prefixes & PREFIX_REPZ) {
5088 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5089 } else {
5090 gen_scas(s, ot);
5091 s->cc_op = CC_OP_SUBB + ot;
5092 }
5093 break;
5094
5095 case 0xa6: /* cmpsS */
5096 case 0xa7:
5097 if ((b & 1) == 0)
5098 ot = OT_BYTE;
5099 else
5100 ot = dflag + OT_WORD;
5101 if (prefixes & PREFIX_REPNZ) {
5102 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5103 } else if (prefixes & PREFIX_REPZ) {
5104 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5105 } else {
5106 gen_cmps(s, ot);
5107 s->cc_op = CC_OP_SUBB + ot;
5108 }
5109 break;
5110 case 0x6c: /* insS */
5111 case 0x6d:
5112 if ((b & 1) == 0)
5113 ot = OT_BYTE;
5114 else
5115 ot = dflag ? OT_LONG : OT_WORD;
5116 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5117 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5118 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5119 } else {
5120 gen_ins(s, ot);
5121 }
5122 break;
5123 case 0x6e: /* outsS */
5124 case 0x6f:
5125 if ((b & 1) == 0)
5126 ot = OT_BYTE;
5127 else
5128 ot = dflag ? OT_LONG : OT_WORD;
5129 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5130 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5131 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5132 } else {
5133 gen_outs(s, ot);
5134 }
5135 break;
5136
5137 /************************/
5138 /* port I/O */
5139 case 0xe4: /* in AL, Ib */
5140 case 0xe5: /* in eAX, Ib */
5141 if ((b & 1) == 0)
5142 ot = OT_BYTE;
5143 else
5144 ot = dflag ? OT_LONG : OT_WORD;
5145 val = ldub_code(s->pc++); /* 8-bit immediate port number */
5146 gen_op_movl_T0_im(val);
5147 gen_check_io(s, ot, 0, pc_start - s->cs_base); /* I/O permission check */
5148 gen_op_in[ot](); /* port value comes back in T1 (stored below) */
5149 gen_op_mov_reg_T1[ot][R_EAX]();
5150 break;
5151 case 0xe6: /* out Ib, AL */
5152 case 0xe7: /* out Ib, eAX */
5153 if ((b & 1) == 0)
5154 ot = OT_BYTE;
5155 else
5156 ot = dflag ? OT_LONG : OT_WORD;
5157 val = ldub_code(s->pc++); /* 8-bit immediate port number */
5158 gen_op_movl_T0_im(val);
5159 gen_check_io(s, ot, 0, pc_start - s->cs_base); /* I/O permission check */
5160#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
5161 if (val == 0x80) /* silently swallow writes to the POST/delay port */
5162 break;
5163#endif /* VBOX */
5164 gen_op_mov_TN_reg[ot][1][R_EAX]();
5165 gen_op_out[ot]();
5166 break;
5167 case 0xec:
5168 case 0xed:
5169 if ((b & 1) == 0)
5170 ot = OT_BYTE;
5171 else
5172 ot = dflag ? OT_LONG : OT_WORD;
5173 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5174 gen_op_andl_T0_ffff();
5175 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5176 gen_op_in[ot]();
5177 gen_op_mov_reg_T1[ot][R_EAX]();
5178 break;
5179 case 0xee:
5180 case 0xef:
5181 if ((b & 1) == 0)
5182 ot = OT_BYTE;
5183 else
5184 ot = dflag ? OT_LONG : OT_WORD;
5185 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5186 gen_op_andl_T0_ffff();
5187 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5188 gen_op_mov_TN_reg[ot][1][R_EAX]();
5189 gen_op_out[ot]();
5190 break;
5191
5192 /************************/
5193 /* control */
5194 case 0xc2: /* ret im */
5195 val = ldsw_code(s->pc); /* imm16: extra stack bytes to release */
5196 s->pc += 2;
5197 gen_pop_T0(s); /* pop return address into T0 */
5198 if (CODE64(s) && s->dflag)
5199 s->dflag = 2; /* near ret uses 64-bit operand size in long mode */
5200 gen_stack_update(s, val + (2 << s->dflag)); /* retaddr size + imm16 */
5201 if (s->dflag == 0)
5202 gen_op_andl_T0_ffff(); /* 16-bit operand size: truncate target IP */
5203 gen_op_jmp_T0();
5204 gen_eob(s); /* indirect jump: end the translation block */
5205 break;
5206 case 0xc3: /* ret */
5207 gen_pop_T0(s); /* pop return address into T0 */
5208 gen_pop_update(s);
5209 if (s->dflag == 0)
5210 gen_op_andl_T0_ffff(); /* 16-bit operand size: truncate target IP */
5211 gen_op_jmp_T0();
5212 gen_eob(s);
5213 break;
5214 case 0xca: /* lret im */
5215 val = ldsw_code(s->pc);
5216 s->pc += 2;
5217 do_lret:
5218 if (s->pe && !s->vm86) {
5219 if (s->cc_op != CC_OP_DYNAMIC)
5220 gen_op_set_cc_op(s->cc_op);
5221 gen_jmp_im(pc_start - s->cs_base);
5222 gen_op_lret_protected(s->dflag, val);
5223 } else {
5224 gen_stack_A0(s);
5225 /* pop offset */
5226 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5227 if (s->dflag == 0)
5228 gen_op_andl_T0_ffff();
5229 /* NOTE: keeping EIP updated is not a problem in case of
5230 exception */
5231 gen_op_jmp_T0();
5232 /* pop selector */
5233 gen_op_addl_A0_im(2 << s->dflag);
5234 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5235 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5236 /* add stack offset */
5237 gen_stack_update(s, val + (4 << s->dflag));
5238 }
5239 gen_eob(s);
5240 break;
5241 case 0xcb: /* lret */
5242 val = 0;
5243 goto do_lret;
5244 case 0xcf: /* iret */
5245 if (!s->pe) {
5246 /* real mode */
5247 gen_op_iret_real(s->dflag);
5248 s->cc_op = CC_OP_EFLAGS;
5249 } else if (s->vm86) {
5250#ifdef VBOX
5251 if (s->iopl != 3 && (!s->vme || s->dflag)) {
5252#else
5253 if (s->iopl != 3) {
5254#endif
5255 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5256 } else {
5257 gen_op_iret_real(s->dflag);
5258 s->cc_op = CC_OP_EFLAGS;
5259 }
5260 } else {
5261 if (s->cc_op != CC_OP_DYNAMIC)
5262 gen_op_set_cc_op(s->cc_op);
5263 gen_jmp_im(pc_start - s->cs_base);
5264 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5265 s->cc_op = CC_OP_EFLAGS;
5266 }
5267 gen_eob(s);
5268 break;
5269 case 0xe8: /* call im */
5270 {
5271 if (dflag)
5272 tval = (int32_t)insn_get(s, OT_LONG);
5273 else
5274 tval = (int16_t)insn_get(s, OT_WORD);
5275 next_eip = s->pc - s->cs_base;
5276 tval += next_eip;
5277 if (s->dflag == 0)
5278 tval &= 0xffff;
5279 gen_movtl_T0_im(next_eip);
5280 gen_push_T0(s);
5281 gen_jmp(s, tval);
5282 }
5283 break;
5284 case 0x9a: /* lcall im */
5285 {
5286 unsigned int selector, offset;
5287
5288 if (CODE64(s))
5289 goto illegal_op;
5290 ot = dflag ? OT_LONG : OT_WORD;
5291 offset = insn_get(s, ot);
5292 selector = insn_get(s, OT_WORD);
5293
5294 gen_op_movl_T0_im(selector);
5295 gen_op_movl_T1_imu(offset);
5296 }
5297 goto do_lcall;
5298 case 0xe9: /* jmp im */
5299 if (dflag)
5300 tval = (int32_t)insn_get(s, OT_LONG);
5301 else
5302 tval = (int16_t)insn_get(s, OT_WORD);
5303 tval += s->pc - s->cs_base;
5304 if (s->dflag == 0)
5305 tval &= 0xffff;
5306 gen_jmp(s, tval);
5307 break;
5308 case 0xea: /* ljmp im */
5309 {
5310 unsigned int selector, offset;
5311
5312 if (CODE64(s))
5313 goto illegal_op;
5314 ot = dflag ? OT_LONG : OT_WORD;
5315 offset = insn_get(s, ot);
5316 selector = insn_get(s, OT_WORD);
5317
5318 gen_op_movl_T0_im(selector);
5319 gen_op_movl_T1_imu(offset);
5320 }
5321 goto do_ljmp;
5322 case 0xeb: /* jmp Jb */
5323 tval = (int8_t)insn_get(s, OT_BYTE);
5324 tval += s->pc - s->cs_base;
5325 if (s->dflag == 0)
5326 tval &= 0xffff;
5327 gen_jmp(s, tval);
5328 break;
5329 case 0x70 ... 0x7f: /* jcc Jb */
5330 tval = (int8_t)insn_get(s, OT_BYTE);
5331 goto do_jcc;
5332 case 0x180 ... 0x18f: /* jcc Jv */
5333 if (dflag) {
5334 tval = (int32_t)insn_get(s, OT_LONG);
5335 } else {
5336 tval = (int16_t)insn_get(s, OT_WORD);
5337 }
5338 do_jcc:
5339 next_eip = s->pc - s->cs_base;
5340 tval += next_eip;
5341 if (s->dflag == 0)
5342 tval &= 0xffff;
5343 gen_jcc(s, b, tval, next_eip);
5344 break;
5345
5346 case 0x190 ... 0x19f: /* setcc Gv */
5347 modrm = ldub_code(s->pc++);
5348 gen_setcc(s, b);
5349 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5350 break;
5351 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5352 ot = dflag + OT_WORD;
5353 modrm = ldub_code(s->pc++);
5354 reg = ((modrm >> 3) & 7) | rex_r;
5355 mod = (modrm >> 6) & 3;
5356 gen_setcc(s, b);
5357 if (mod != 3) {
5358 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5359 gen_op_ld_T1_A0[ot + s->mem_index]();
5360 } else {
5361 rm = (modrm & 7) | REX_B(s);
5362 gen_op_mov_TN_reg[ot][1][rm]();
5363 }
5364 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5365 break;
5366
5367 /************************/
5368 /* flags */
5369 case 0x9c: /* pushf */
5370#ifdef VBOX
5371 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5372#else
5373 if (s->vm86 && s->iopl != 3) {
5374#endif
5375 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5376 } else {
5377 if (s->cc_op != CC_OP_DYNAMIC)
5378 gen_op_set_cc_op(s->cc_op);
5379#ifdef VBOX
5380 if (s->vm86 && s->vme && s->iopl != 3)
5381 gen_op_movl_T0_eflags_vme();
5382 else
5383#endif
5384 gen_op_movl_T0_eflags();
5385 gen_push_T0(s);
5386 }
5387 break;
5388 case 0x9d: /* popf */
5389#ifdef VBOX
5390 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5391#else
5392 if (s->vm86 && s->iopl != 3) {
5393#endif
5394 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5395 } else {
5396 gen_pop_T0(s);
5397 if (s->cpl == 0) {
5398 if (s->dflag) {
5399 gen_op_movl_eflags_T0_cpl0();
5400 } else {
5401 gen_op_movw_eflags_T0_cpl0();
5402 }
5403 } else {
5404 if (s->cpl <= s->iopl) {
5405 if (s->dflag) {
5406 gen_op_movl_eflags_T0_io();
5407 } else {
5408 gen_op_movw_eflags_T0_io();
5409 }
5410 } else {
5411 if (s->dflag) {
5412 gen_op_movl_eflags_T0();
5413 } else {
5414#ifdef VBOX
5415 if (s->vm86 && s->vme)
5416 gen_op_movw_eflags_T0_vme();
5417 else
5418#endif
5419 gen_op_movw_eflags_T0();
5420 }
5421 }
5422 }
5423 gen_pop_update(s);
5424 s->cc_op = CC_OP_EFLAGS;
5425 /* abort translation because TF flag may change */
5426 gen_jmp_im(s->pc - s->cs_base);
5427 gen_eob(s);
5428 }
5429 break;
5430 case 0x9e: /* sahf */
5431 /* In 64-bit mode LAHF/SAHF are only valid when CPUID reports
5432 the LAHF_LM extension (CPUID.80000001H:ECX bit 0). */
5431 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5432 goto illegal_op;
5433 gen_op_mov_TN_reg[OT_BYTE][0][R_AH](); /* T0 = AH */
5434 if (s->cc_op != CC_OP_DYNAMIC)
5435 gen_op_set_cc_op(s->cc_op); /* flush lazy flags before writing them */
5436 gen_op_movb_eflags_T0();
5437 s->cc_op = CC_OP_EFLAGS; /* flags now live in EFLAGS */
5438 break;
5439 case 0x9f: /* lahf */
5440 /* Same LAHF_LM gate as SAHF above. */
5440 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5441 goto illegal_op;
5442 if (s->cc_op != CC_OP_DYNAMIC)
5443 gen_op_set_cc_op(s->cc_op); /* materialize lazy flags first */
5444 gen_op_movl_T0_eflags()
5445 gen_op_mov_reg_T0[OT_BYTE][R_AH](); /* AH = low flag byte */
5446 break;
5447 case 0xf5: /* cmc */
5448 if (s->cc_op != CC_OP_DYNAMIC)
5449 gen_op_set_cc_op(s->cc_op);
5450 gen_op_cmc();
5451 s->cc_op = CC_OP_EFLAGS;
5452 break;
5453 case 0xf8: /* clc */
5454 if (s->cc_op != CC_OP_DYNAMIC)
5455 gen_op_set_cc_op(s->cc_op);
5456 gen_op_clc();
5457 s->cc_op = CC_OP_EFLAGS;
5458 break;
5459 case 0xf9: /* stc */
5460 if (s->cc_op != CC_OP_DYNAMIC)
5461 gen_op_set_cc_op(s->cc_op);
5462 gen_op_stc();
5463 s->cc_op = CC_OP_EFLAGS;
5464 break;
5465 case 0xfc: /* cld */
5466 gen_op_cld();
5467 break;
5468 case 0xfd: /* std */
5469 gen_op_std();
5470 break;
5471
5472 /************************/
5473 /* bit operations */
5474 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5475 ot = dflag + OT_WORD;
5476 modrm = ldub_code(s->pc++);
5477 op = (modrm >> 3) & 7;
5478 mod = (modrm >> 6) & 3;
5479 rm = (modrm & 7) | REX_B(s);
5480 if (mod != 3) {
5481 s->rip_offset = 1;
5482 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5483 gen_op_ld_T0_A0[ot + s->mem_index]();
5484 } else {
5485 gen_op_mov_TN_reg[ot][0][rm]();
5486 }
5487 /* load shift */
5488 val = ldub_code(s->pc++);
5489 gen_op_movl_T1_im(val);
5490 if (op < 4)
5491 goto illegal_op;
5492 op -= 4;
5493 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5494 s->cc_op = CC_OP_SARB + ot;
5495 if (op != 0) {
5496 if (mod != 3)
5497 gen_op_st_T0_A0[ot + s->mem_index]();
5498 else
5499 gen_op_mov_reg_T0[ot][rm]();
5500 gen_op_update_bt_cc();
5501 }
5502 break;
5503 case 0x1a3: /* bt Gv, Ev */
5504 op = 0;
5505 goto do_btx;
5506 case 0x1ab: /* bts */
5507 op = 1;
5508 goto do_btx;
5509 case 0x1b3: /* btr */
5510 op = 2;
5511 goto do_btx;
5512 case 0x1bb: /* btc */
5513 op = 3;
5514 do_btx:
5515 ot = dflag + OT_WORD;
5516 modrm = ldub_code(s->pc++);
5517 reg = ((modrm >> 3) & 7) | rex_r;
5518 mod = (modrm >> 6) & 3;
5519 rm = (modrm & 7) | REX_B(s);
5520 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5521 if (mod != 3) {
5522 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5523 /* specific case: we need to add a displacement */
5524 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5525 gen_op_ld_T0_A0[ot + s->mem_index]();
5526 } else {
5527 gen_op_mov_TN_reg[ot][0][rm]();
5528 }
5529 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5530 s->cc_op = CC_OP_SARB + ot;
5531 if (op != 0) {
5532 if (mod != 3)
5533 gen_op_st_T0_A0[ot + s->mem_index]();
5534 else
5535 gen_op_mov_reg_T0[ot][rm]();
5536 gen_op_update_bt_cc();
5537 }
5538 break;
5539 case 0x1bc: /* bsf */
5540 case 0x1bd: /* bsr */
5541 ot = dflag + OT_WORD;
5542 modrm = ldub_code(s->pc++);
5543 reg = ((modrm >> 3) & 7) | rex_r;
5544 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5545 /* NOTE: in order to handle the 0 case, we must load the
5546 result. It could be optimized with a generated jump */
5547 gen_op_mov_TN_reg[ot][1][reg]();
5548 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5549 gen_op_mov_reg_T1[ot][reg]();
5550 s->cc_op = CC_OP_LOGICB + ot;
5551 break;
5552 /************************/
5553 /* bcd */
5554 case 0x27: /* daa */
5555 if (CODE64(s))
5556 goto illegal_op;
5557 if (s->cc_op != CC_OP_DYNAMIC)
5558 gen_op_set_cc_op(s->cc_op);
5559 gen_op_daa();
5560 s->cc_op = CC_OP_EFLAGS;
5561 break;
5562 case 0x2f: /* das */
5563 if (CODE64(s))
5564 goto illegal_op;
5565 if (s->cc_op != CC_OP_DYNAMIC)
5566 gen_op_set_cc_op(s->cc_op);
5567 gen_op_das();
5568 s->cc_op = CC_OP_EFLAGS;
5569 break;
5570 case 0x37: /* aaa */
5571 if (CODE64(s))
5572 goto illegal_op;
5573 if (s->cc_op != CC_OP_DYNAMIC)
5574 gen_op_set_cc_op(s->cc_op);
5575 gen_op_aaa();
5576 s->cc_op = CC_OP_EFLAGS;
5577 break;
5578 case 0x3f: /* aas */
5579 if (CODE64(s))
5580 goto illegal_op;
5581 if (s->cc_op != CC_OP_DYNAMIC)
5582 gen_op_set_cc_op(s->cc_op);
5583 gen_op_aas();
5584 s->cc_op = CC_OP_EFLAGS;
5585 break;
5586 case 0xd4: /* aam */
5587 if (CODE64(s))
5588 goto illegal_op;
5589 val = ldub_code(s->pc++);
5590 if (val == 0) {
5591 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5592 } else {
5593 gen_op_aam(val);
5594 s->cc_op = CC_OP_LOGICB;
5595 }
5596 break;
5597 case 0xd5: /* aad */
5598 if (CODE64(s))
5599 goto illegal_op;
5600 val = ldub_code(s->pc++);
5601 gen_op_aad(val);
5602 s->cc_op = CC_OP_LOGICB;
5603 break;
5604 /************************/
5605 /* misc */
5606 case 0x90: /* nop */
5607 /* XXX: xchg + rex handling */
5608 /* XXX: correct lock test for all insn */
5609 if (prefixes & PREFIX_LOCK)
5610 goto illegal_op;
5611 break;
5612 case 0x9b: /* fwait */
5613 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5614 (HF_MP_MASK | HF_TS_MASK)) {
5615 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5616 } else {
5617 if (s->cc_op != CC_OP_DYNAMIC)
5618 gen_op_set_cc_op(s->cc_op);
5619 gen_jmp_im(pc_start - s->cs_base);
5620 gen_op_fwait();
5621 }
5622 break;
5623 case 0xcc: /* int3 */
5624#ifdef VBOX
5625 if (s->vm86 && s->iopl != 3 && !s->vme) {
5626 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5627 } else
5628#endif
5629 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5630 break;
5631 case 0xcd: /* int N */
5632 val = ldub_code(s->pc++);
5633#ifdef VBOX
5634 if (s->vm86 && s->iopl != 3 && !s->vme) {
5635#else
5636 if (s->vm86 && s->iopl != 3) {
5637#endif
5638 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5639 } else {
5640 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5641 }
5642 break;
5643 case 0xce: /* into */
5644 if (CODE64(s))
5645 goto illegal_op;
5646 if (s->cc_op != CC_OP_DYNAMIC)
5647 gen_op_set_cc_op(s->cc_op);
5648 gen_jmp_im(pc_start - s->cs_base);
5649 gen_op_into(s->pc - pc_start);
5650 break;
5651 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5652#if 1
5653 gen_debug(s, pc_start - s->cs_base);
5654#else
5655 /* start debug */
5656 tb_flush(cpu_single_env);
5657 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5658#endif
5659 break;
5660 case 0xfa: /* cli */
5661 if (!s->vm86) {
5662 if (s->cpl <= s->iopl) { /* privilege ok: clear IF */
5663 gen_op_cli();
5664 } else {
5665 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5666 }
5667 } else {
5668 if (s->iopl == 3) { /* vm86 with IOPL 3: allowed */
5669 gen_op_cli();
5670#ifdef VBOX
5671 } else if (s->iopl != 3 && s->vme) { /* VME: emulate via VIF; NOTE(review): iopl != 3 is always true here */
5672 gen_op_cli_vme();
5673#endif
5674 } else {
5675 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5676 }
5677 }
5678 break;
5679 case 0xfb: /* sti */
5680 if (!s->vm86) {
5681 if (s->cpl <= s->iopl) { /* privilege ok: set IF */
5682 gen_sti:
5683 gen_op_sti();
5684 /* interruptions are enabled only the first insn after sti */
5685 /* If several instructions disable interrupts, only the
5686 _first_ does it */
5687 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5688 gen_op_set_inhibit_irq();
5689 /* give a chance to handle pending irqs */
5690 gen_jmp_im(s->pc - s->cs_base);
5691 gen_eob(s);
5692 } else {
5693 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5694 }
5695 } else {
5696 if (s->iopl == 3) { /* vm86 with IOPL 3: reuse the path above */
5697 goto gen_sti;
5698#ifdef VBOX
5699 } else if (s->iopl != 3 && s->vme) { /* VME: emulate via VIF; NOTE(review): iopl != 3 is always true here */
5700 gen_op_sti_vme();
5701 /* give a chance to handle pending irqs */
5702 gen_jmp_im(s->pc - s->cs_base);
5703 gen_eob(s);
5704#endif
5705 } else {
5706 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5707 }
5708 }
5709 break;
5710 case 0x62: /* bound */
5711 if (CODE64(s))
5712 goto illegal_op;
5713 ot = dflag ? OT_LONG : OT_WORD;
5714 modrm = ldub_code(s->pc++);
5715 reg = (modrm >> 3) & 7;
5716 mod = (modrm >> 6) & 3;
5717 if (mod == 3)
5718 goto illegal_op;
5719 gen_op_mov_TN_reg[ot][0][reg]();
5720 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5721 gen_jmp_im(pc_start - s->cs_base);
5722 if (ot == OT_WORD)
5723 gen_op_boundw();
5724 else
5725 gen_op_boundl();
5726 break;
5727 case 0x1c8 ... 0x1cf: /* bswap reg */
5728 reg = (b & 7) | REX_B(s);
5729#ifdef TARGET_X86_64
5730 if (dflag == 2) {
5731 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5732 gen_op_bswapq_T0();
5733 gen_op_mov_reg_T0[OT_QUAD][reg]();
5734 } else
5735#endif
5736 {
5737 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5738 gen_op_bswapl_T0();
5739 gen_op_mov_reg_T0[OT_LONG][reg]();
5740 }
5741 break;
5742 case 0xd6: /* salc (undocumented: AL = CF ? 0xff : 0x00) */
5743 if (CODE64(s))
5744 goto illegal_op; /* not valid in 64-bit mode */
5745 if (s->cc_op != CC_OP_DYNAMIC)
5746 gen_op_set_cc_op(s->cc_op); /* CF must be up to date */
5747 gen_op_salc();
5748 break;
5749 case 0xe0: /* loopnz */
5750 case 0xe1: /* loopz */
5751 if (s->cc_op != CC_OP_DYNAMIC)
5752 gen_op_set_cc_op(s->cc_op); /* loopz/loopnz also test ZF */
5753 /* FALL THRU */
5754 case 0xe2: /* loop */
5755 case 0xe3: /* jecxz */
5756 {
5757 int l1, l2; /* l1 = branch-taken target, l2 = join point */
5758
5759 tval = (int8_t)insn_get(s, OT_BYTE); /* rel8 displacement */
5760 next_eip = s->pc - s->cs_base;
5761 tval += next_eip;
5762 if (s->dflag == 0)
5763 tval &= 0xffff; /* 16-bit operand size: wrap target IP */
5764
5765 l1 = gen_new_label();
5766 l2 = gen_new_label();
5767 b &= 3; /* 0=loopnz 1=loopz 2=loop 3=jecxz */
5768 if (b == 3) {
5769 gen_op_jz_ecx[s->aflag](l1); /* jecxz: no (E)CX decrement */
5770 } else {
5771 gen_op_dec_ECX[s->aflag]();
5772 if (b <= 1)
5773 gen_op_mov_T0_cc(); /* fetch flags for the ZF test */
5774 gen_op_loop[s->aflag][b](l1);
5775 }
5776
5777 gen_jmp_im(next_eip); /* not taken: fall through */
5778 gen_op_jmp_label(l2);
5779 gen_set_label(l1);
5780 gen_jmp_im(tval); /* taken: jump to target */
5781 gen_set_label(l2);
5782 gen_eob(s);
5783 }
5784 break;
5785 case 0x130: /* wrmsr */
5786 case 0x132: /* rdmsr */
5787 if (s->cpl != 0) {
5788 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5789 } else {
5790 if (b & 2)
5791 gen_op_rdmsr();
5792 else
5793 gen_op_wrmsr();
5794 }
5795 break;
5796 case 0x131: /* rdtsc */
5797 gen_jmp_im(pc_start - s->cs_base);
5798 gen_op_rdtsc();
5799 break;
5800 case 0x134: /* sysenter */
5801 if (CODE64(s))
5802 goto illegal_op;
5803 if (!s->pe) {
5804 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5805 } else {
5806 if (s->cc_op != CC_OP_DYNAMIC) {
5807 gen_op_set_cc_op(s->cc_op);
5808 s->cc_op = CC_OP_DYNAMIC;
5809 }
5810 gen_jmp_im(pc_start - s->cs_base);
5811 gen_op_sysenter();
5812 gen_eob(s);
5813 }
5814 break;
5815 case 0x135: /* sysexit */
5816 if (CODE64(s))
5817 goto illegal_op;
5818 if (!s->pe) {
5819 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5820 } else {
5821 if (s->cc_op != CC_OP_DYNAMIC) {
5822 gen_op_set_cc_op(s->cc_op);
5823 s->cc_op = CC_OP_DYNAMIC;
5824 }
5825 gen_jmp_im(pc_start - s->cs_base);
5826 gen_op_sysexit();
5827 gen_eob(s);
5828 }
5829 break;
5830#ifdef TARGET_X86_64
5831 case 0x105: /* syscall */
5832 /* XXX: is it usable in real mode ? */
5833 if (s->cc_op != CC_OP_DYNAMIC) {
5834 gen_op_set_cc_op(s->cc_op);
5835 s->cc_op = CC_OP_DYNAMIC;
5836 }
5837 gen_jmp_im(pc_start - s->cs_base);
5838 gen_op_syscall(s->pc - pc_start);
5839 gen_eob(s);
5840 break;
5841 case 0x107: /* sysret */
5842 if (!s->pe) {
5843 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5844 } else {
5845 if (s->cc_op != CC_OP_DYNAMIC) {
5846 gen_op_set_cc_op(s->cc_op);
5847 s->cc_op = CC_OP_DYNAMIC;
5848 }
5849 gen_jmp_im(pc_start - s->cs_base);
5850 gen_op_sysret(s->dflag);
5851 /* condition codes are modified only in long mode */
5852 if (s->lma)
5853 s->cc_op = CC_OP_EFLAGS;
5854 gen_eob(s);
5855 }
5856 break;
5857#endif
5858 case 0x1a2: /* cpuid */
5859 gen_op_cpuid();
5860 break;
5861 case 0xf4: /* hlt */
5862 if (s->cpl != 0) {
5863 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5864 } else {
5865 if (s->cc_op != CC_OP_DYNAMIC)
5866 gen_op_set_cc_op(s->cc_op);
5867 gen_jmp_im(s->pc - s->cs_base);
5868 gen_op_hlt();
5869 s->is_jmp = 3;
5870 }
5871 break;
5872 case 0x100:
5873 modrm = ldub_code(s->pc++);
5874 mod = (modrm >> 6) & 3;
5875 op = (modrm >> 3) & 7;
5876 switch(op) {
5877 case 0: /* sldt */
5878 if (!s->pe || s->vm86)
5879 goto illegal_op;
5880 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5881 ot = OT_WORD;
5882 if (mod == 3)
5883 ot += s->dflag;
5884 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5885 break;
5886 case 2: /* lldt */
5887 if (!s->pe || s->vm86)
5888 goto illegal_op;
5889 if (s->cpl != 0) {
5890 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5891 } else {
5892 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5893 gen_jmp_im(pc_start - s->cs_base);
5894 gen_op_lldt_T0();
5895 }
5896 break;
5897 case 1: /* str */
5898 if (!s->pe || s->vm86)
5899 goto illegal_op;
5900 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5901 ot = OT_WORD;
5902 if (mod == 3)
5903 ot += s->dflag;
5904 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5905 break;
5906 case 3: /* ltr */
5907 if (!s->pe || s->vm86)
5908 goto illegal_op;
5909 if (s->cpl != 0) {
5910 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5911 } else {
5912 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5913 gen_jmp_im(pc_start - s->cs_base);
5914 gen_op_ltr_T0();
5915 }
5916 break;
5917 case 4: /* verr */
5918 case 5: /* verw */
5919 if (!s->pe || s->vm86)
5920 goto illegal_op;
5921 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5922 if (s->cc_op != CC_OP_DYNAMIC)
5923 gen_op_set_cc_op(s->cc_op);
5924 if (op == 4)
5925 gen_op_verr();
5926 else
5927 gen_op_verw();
5928 s->cc_op = CC_OP_EFLAGS;
5929 break;
5930 default:
5931 goto illegal_op;
5932 }
5933 break;
5934 case 0x101:
5935 modrm = ldub_code(s->pc++);
5936 mod = (modrm >> 6) & 3;
5937 op = (modrm >> 3) & 7;
5938 rm = modrm & 7;
5939 switch(op) {
5940 case 0: /* sgdt */
5941 if (mod == 3)
5942 goto illegal_op;
5943 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5944 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5945 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5946 gen_add_A0_im(s, 2);
5947 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5948 if (!s->dflag)
5949 gen_op_andl_T0_im(0xffffff);
5950 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5951 break;
5952 case 1:
5953 if (mod == 3) {
5954 switch (rm) {
5955 case 0: /* monitor */
5956 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5957 s->cpl != 0)
5958 goto illegal_op;
5959 gen_jmp_im(pc_start - s->cs_base);
5960#ifdef TARGET_X86_64
5961 if (s->aflag == 2) {
5962 gen_op_movq_A0_reg[R_EBX]();
5963 gen_op_addq_A0_AL();
5964 } else
5965#endif
5966 {
5967 gen_op_movl_A0_reg[R_EBX]();
5968 gen_op_addl_A0_AL();
5969 if (s->aflag == 0)
5970 gen_op_andl_A0_ffff();
5971 }
5972 gen_add_A0_ds_seg(s);
5973 gen_op_monitor();
5974 break;
5975 case 1: /* mwait */
5976 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5977 s->cpl != 0)
5978 goto illegal_op;
5979 if (s->cc_op != CC_OP_DYNAMIC) {
5980 gen_op_set_cc_op(s->cc_op);
5981 s->cc_op = CC_OP_DYNAMIC;
5982 }
5983 gen_jmp_im(s->pc - s->cs_base);
5984 gen_op_mwait();
5985 gen_eob(s);
5986 break;
5987 default:
5988 goto illegal_op;
5989 }
5990 } else { /* sidt */
5991 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5992 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5993 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5994 gen_add_A0_im(s, 2);
5995 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5996 if (!s->dflag)
5997 gen_op_andl_T0_im(0xffffff);
5998 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5999 }
6000 break;
6001 case 2: /* lgdt */
6002 case 3: /* lidt */
6003 if (mod == 3)
6004 goto illegal_op;
6005 if (s->cpl != 0) {
6006 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6007 } else {
6008 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6009 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
6010 gen_add_A0_im(s, 2);
6011 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
6012 if (!s->dflag)
6013 gen_op_andl_T0_im(0xffffff);
6014 if (op == 2) {
6015 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6016 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6017 } else {
6018 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6019 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6020 }
6021 }
6022 break;
6023 case 4: /* smsw */
6024 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6025 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6026 break;
6027 case 6: /* lmsw */
6028 if (s->cpl != 0) {
6029 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6030 } else {
6031 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6032 gen_op_lmsw_T0();
6033 gen_jmp_im(s->pc - s->cs_base);
6034 gen_eob(s);
6035 }
6036 break;
6037 case 7: /* invlpg */
6038 if (s->cpl != 0) {
6039 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6040 } else {
6041 if (mod == 3) {
6042#ifdef TARGET_X86_64
6043 if (CODE64(s) && rm == 0) {
6044 /* swapgs */
6045 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6046 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6047 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6048 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6049 } else
6050#endif
6051 {
6052 goto illegal_op;
6053 }
6054 } else {
6055 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6056 gen_op_invlpg_A0();
6057 gen_jmp_im(s->pc - s->cs_base);
6058 gen_eob(s);
6059 }
6060 }
6061 break;
6062 default:
6063 goto illegal_op;
6064 }
6065 break;
6066 case 0x108: /* invd */
6067 case 0x109: /* wbinvd */
6068 if (s->cpl != 0) {
6069 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6070 } else {
6071 /* nothing to do */
6072 }
6073 break;
6074 case 0x63: /* arpl or movslS (x86_64) */
6075#ifdef TARGET_X86_64
6076 if (CODE64(s)) {
6077 int d_ot;
6078 /* d_ot is the size of destination */
6079 d_ot = dflag + OT_WORD;
6080
6081 modrm = ldub_code(s->pc++);
6082 reg = ((modrm >> 3) & 7) | rex_r;
6083 mod = (modrm >> 6) & 3;
6084 rm = (modrm & 7) | REX_B(s);
6085
6086 if (mod == 3) {
6087 gen_op_mov_TN_reg[OT_LONG][0][rm]();
6088 /* sign extend */
6089 if (d_ot == OT_QUAD)
6090 gen_op_movslq_T0_T0();
6091 gen_op_mov_reg_T0[d_ot][reg]();
6092 } else {
6093 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6094 if (d_ot == OT_QUAD) {
6095 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
6096 } else {
6097 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6098 }
6099 gen_op_mov_reg_T0[d_ot][reg]();
6100 }
6101 } else
6102#endif
6103 {
6104 if (!s->pe || s->vm86)
6105 goto illegal_op;
6106 ot = dflag ? OT_LONG : OT_WORD;
6107 modrm = ldub_code(s->pc++);
6108 reg = (modrm >> 3) & 7;
6109 mod = (modrm >> 6) & 3;
6110 rm = modrm & 7;
6111#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
6112 gen_op_mov_TN_reg[ot][1][reg]();
6113#endif
6114 if (mod != 3) {
6115 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6116 gen_op_ld_T0_A0[ot + s->mem_index]();
6117 } else {
6118 gen_op_mov_TN_reg[ot][0][rm]();
6119 }
6120 if (s->cc_op != CC_OP_DYNAMIC)
6121 gen_op_set_cc_op(s->cc_op);
6122 gen_op_arpl();
6123 s->cc_op = CC_OP_EFLAGS;
6124 if (mod != 3) {
6125 gen_op_st_T0_A0[ot + s->mem_index]();
6126 } else {
6127 gen_op_mov_reg_T0[ot][rm]();
6128 }
6129 gen_op_arpl_update();
6130 }
6131 break;
6132 case 0x102: /* lar */
6133 case 0x103: /* lsl */
6134 if (!s->pe || s->vm86)
6135 goto illegal_op;
6136 ot = dflag ? OT_LONG : OT_WORD;
6137 modrm = ldub_code(s->pc++);
6138 reg = ((modrm >> 3) & 7) | rex_r;
6139 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6140 gen_op_mov_TN_reg[ot][1][reg]();
6141 if (s->cc_op != CC_OP_DYNAMIC)
6142 gen_op_set_cc_op(s->cc_op);
6143 if (b == 0x102)
6144 gen_op_lar();
6145 else
6146 gen_op_lsl();
6147 s->cc_op = CC_OP_EFLAGS;
6148 gen_op_mov_reg_T1[ot][reg]();
6149 break;
6150 case 0x118:
6151 modrm = ldub_code(s->pc++);
6152 mod = (modrm >> 6) & 3;
6153 op = (modrm >> 3) & 7;
6154 switch(op) {
6155 case 0: /* prefetchnta */
6156 case 1: /* prefetchnt0 */
6157 case 2: /* prefetchnt0 */
6158 case 3: /* prefetchnt0 */
6159 if (mod == 3)
6160 goto illegal_op;
6161 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6162 /* nothing more to do */
6163 break;
6164 default: /* nop (multi byte) */
6165 gen_nop_modrm(s, modrm);
6166 break;
6167 }
6168 break;
6169 case 0x119 ... 0x11f: /* nop (multi byte) */
6170 modrm = ldub_code(s->pc++);
6171 gen_nop_modrm(s, modrm);
6172 break;
6173 case 0x120: /* mov reg, crN */
6174 case 0x122: /* mov crN, reg */
6175 if (s->cpl != 0) {
6176 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6177 } else {
6178 modrm = ldub_code(s->pc++);
6179 if ((modrm & 0xc0) != 0xc0)
6180 goto illegal_op;
6181 rm = (modrm & 7) | REX_B(s);
6182 reg = ((modrm >> 3) & 7) | rex_r;
6183 if (CODE64(s))
6184 ot = OT_QUAD;
6185 else
6186 ot = OT_LONG;
6187 switch(reg) {
6188 case 0:
6189 case 2:
6190 case 3:
6191 case 4:
6192 case 8:
6193 if (b & 2) {
6194 gen_op_mov_TN_reg[ot][0][rm]();
6195 gen_op_movl_crN_T0(reg);
6196 gen_jmp_im(s->pc - s->cs_base);
6197 gen_eob(s);
6198 } else {
6199#if !defined(CONFIG_USER_ONLY)
6200 if (reg == 8)
6201 gen_op_movtl_T0_cr8();
6202 else
6203#endif
6204 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6205 gen_op_mov_reg_T0[ot][rm]();
6206 }
6207 break;
6208 default:
6209 goto illegal_op;
6210 }
6211 }
6212 break;
6213 case 0x121: /* mov reg, drN */
6214 case 0x123: /* mov drN, reg */
6215 if (s->cpl != 0) {
6216 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6217 } else {
6218 modrm = ldub_code(s->pc++);
6219 if ((modrm & 0xc0) != 0xc0)
6220 goto illegal_op;
6221 rm = (modrm & 7) | REX_B(s);
6222 reg = ((modrm >> 3) & 7) | rex_r;
6223 if (CODE64(s))
6224 ot = OT_QUAD;
6225 else
6226 ot = OT_LONG;
6227 /* XXX: do it dynamically with CR4.DE bit */
6228 if (reg == 4 || reg == 5 || reg >= 8)
6229 goto illegal_op;
6230 if (b & 2) {
6231 gen_op_mov_TN_reg[ot][0][rm]();
6232 gen_op_movl_drN_T0(reg);
6233 gen_jmp_im(s->pc - s->cs_base);
6234 gen_eob(s);
6235 } else {
6236 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6237 gen_op_mov_reg_T0[ot][rm]();
6238 }
6239 }
6240 break;
6241 case 0x106: /* clts */
6242 if (s->cpl != 0) {
6243 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6244 } else {
6245 gen_op_clts();
6246 /* abort block because static cpu state changed */
6247 gen_jmp_im(s->pc - s->cs_base);
6248 gen_eob(s);
6249 }
6250 break;
6251 /* MMX/SSE/SSE2/PNI support */
6252 case 0x1c3: /* MOVNTI reg, mem */
6253 if (!(s->cpuid_features & CPUID_SSE2))
6254 goto illegal_op;
6255 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6256 modrm = ldub_code(s->pc++);
6257 mod = (modrm >> 6) & 3;
6258 if (mod == 3)
6259 goto illegal_op;
6260 reg = ((modrm >> 3) & 7) | rex_r;
6261 /* generate a generic store */
6262 gen_ldst_modrm(s, modrm, ot, reg, 1);
6263 break;
6264 case 0x1ae:
6265 modrm = ldub_code(s->pc++);
6266 mod = (modrm >> 6) & 3;
6267 op = (modrm >> 3) & 7;
6268 switch(op) {
6269 case 0: /* fxsave */
6270 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6271 (s->flags & HF_EM_MASK))
6272 goto illegal_op;
6273 if (s->flags & HF_TS_MASK) {
6274 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6275 break;
6276 }
6277 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6278 gen_op_fxsave_A0((s->dflag == 2));
6279 break;
6280 case 1: /* fxrstor */
6281 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6282 (s->flags & HF_EM_MASK))
6283 goto illegal_op;
6284 if (s->flags & HF_TS_MASK) {
6285 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6286 break;
6287 }
6288 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6289 gen_op_fxrstor_A0((s->dflag == 2));
6290 break;
6291 case 2: /* ldmxcsr */
6292 case 3: /* stmxcsr */
6293 if (s->flags & HF_TS_MASK) {
6294 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6295 break;
6296 }
6297 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6298 mod == 3)
6299 goto illegal_op;
6300 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6301 if (op == 2) {
6302 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6303 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6304 } else {
6305 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6306 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6307 }
6308 break;
6309 case 5: /* lfence */
6310 case 6: /* mfence */
6311 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6312 goto illegal_op;
6313 break;
6314 case 7: /* sfence / clflush */
6315 if ((modrm & 0xc7) == 0xc0) {
6316 /* sfence */
6317 if (!(s->cpuid_features & CPUID_SSE))
6318 goto illegal_op;
6319 } else {
6320 /* clflush */
6321 if (!(s->cpuid_features & CPUID_CLFLUSH))
6322 goto illegal_op;
6323 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6324 }
6325 break;
6326 default:
6327 goto illegal_op;
6328 }
6329 break;
6330 case 0x10d: /* prefetch */
6331 modrm = ldub_code(s->pc++);
6332 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6333 /* ignore for now */
6334 break;
6335 case 0x1aa: /* rsm */
6336 if (!(s->flags & HF_SMM_MASK))
6337 goto illegal_op;
6338 if (s->cc_op != CC_OP_DYNAMIC) {
6339 gen_op_set_cc_op(s->cc_op);
6340 s->cc_op = CC_OP_DYNAMIC;
6341 }
6342 gen_jmp_im(s->pc - s->cs_base);
6343 gen_op_rsm();
6344 gen_eob(s);
6345 break;
6346 case 0x110 ... 0x117:
6347 case 0x128 ... 0x12f:
6348 case 0x150 ... 0x177:
6349 case 0x17c ... 0x17f:
6350 case 0x1c2:
6351 case 0x1c4 ... 0x1c6:
6352 case 0x1d0 ... 0x1fe:
6353 gen_sse(s, b, pc_start, rex_r);
6354 break;
6355 default:
6356 goto illegal_op;
6357 }
6358 /* lock generation */
6359 if (s->prefix & PREFIX_LOCK)
6360 gen_op_unlock();
6361 return s->pc;
6362 illegal_op:
6363 if (s->prefix & PREFIX_LOCK)
6364 gen_op_unlock();
6365 /* XXX: ensure that no lock was generated */
6366 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6367 return s->pc;
6368}
6369
/* convenience masks over the individual CC_* flag bits */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation.  Indexed by micro-op number; used by
   optimize_flags() (backward liveness pass) to decide which flag
   computations feeding an op must be kept alive. */
static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops consume the auxiliary (and sometimes carry) flag */
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps after a sub: each reads exactly the flags its
       x86 condition tests (b=C, z=Z, be=Z|C, s=S, l=S^O, le=Z|(S^O)) */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* loopnz/loopz test ZF in addition to (E)CX */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc from the computed cc state */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc specialised for a preceding sub */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    /* pushf/lahf-style reads need all arithmetic flags */
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* 64-bit (quad) variants of the above */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr consume the incoming carry; instantiated once per
   memory-access suffix (plain, _raw, _kernel, _user) below */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6505
/* flags written by an operation.  Indexed by micro-op number; an op whose
   written flags are all dead may be replaced by its opc_simpler[] variant
   in optimize_flags(). */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    /* multiplies clobber the full arithmetic flag set */
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* popf/sahf-style eflags loads; the byte form cannot touch OF */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit test ops */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    /* bit scan ops */
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    /* system/segment ops that only report success via ZF */
    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* ops instantiated once per memory-access suffix (plain, _raw, _kernel,
   _user); rotates only define O and C, shifts define the full set */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6656
/* simpler form of an operation if no flags need to be generated.
   optimize_flags() substitutes opc_simpler[op] for op when every flag the
   op writes is dead; entries left at 0 are filled with the identity
   mapping by optimize_flags_init(). */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops become nops when the flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shift-with-flags ops degrade to their plain (no _cc) variants */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,

/* rotate ops, instantiated once per memory-access suffix */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6700
6701void optimize_flags_init(void)
6702{
6703 int i;
6704 /* put default values in arrays */
6705 for(i = 0; i < NB_OPS; i++) {
6706 if (opc_simpler[i] == 0)
6707 opc_simpler[i] = i;
6708 }
6709}
6710
6711/* CPU flags computation optimization: we move backward thru the
6712 generated code to see which flags are needed. The operation is
6713 modified if suitable */
6714static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6715{
6716 uint16_t *opc_ptr;
6717 int live_flags, write_flags, op;
6718
6719 opc_ptr = opc_buf + opc_buf_len;
6720 /* live_flags contains the flags needed by the next instructions
6721 in the code. At the end of the bloc, we consider that all the
6722 flags are live. */
6723 live_flags = CC_OSZAPC;
6724 while (opc_ptr > opc_buf) {
6725 op = *--opc_ptr;
6726 /* if none of the flags written by the instruction is used,
6727 then we can try to find a simpler instruction */
6728 write_flags = opc_write_flags[op];
6729 if ((live_flags & write_flags) == 0) {
6730 *opc_ptr = opc_simpler[op];
6731 }
6732 /* compute the live flags before the instruction */
6733 live_flags &= ~write_flags;
6734 live_flags |= opc_read_flags[op];
6735 }
6736}
6737
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to recover the
   guest PC after a fault inside the block).  Returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the static translation-time CPU state out of tb->flags */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX_WITH_CALL_RECORD
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    /* VBox: record guest calls only for paged, IF-clear, 32-bit code
       that is not running in raw ring-0 mode */
    if ( !(env->state & CPU_RAW_RING0)
        && (env->cr[0] & CR0_PG_MASK)
        && !(env->eflags & X86_EFL_IF)
        && dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: mem_index picks the _raw (0),
       _kernel (4) or _user (8) variant in the generated-op tables */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no per-insn stop
       condition (trap flag, single-step, inhibited irq) applies */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug exception before any instruction that has a
           breakpoint set on it */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record the guest PC and cc_op for the first micro-op of
               this instruction, zero-filling any gap */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
    if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
    {
        //should never happen as the jump to the patch code terminates the translation block
        dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
    }
*/
#endif
        /* VBox: single-instruction emulation requested; end the block
           after the instruction just translated */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6931
6932int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6933{
6934 return gen_intermediate_code_internal(env, tb, 0);
6935}
6936
6937int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6938{
6939 return gen_intermediate_code_internal(env, tb, 1);
6940}
6941
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette