VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c @ 1478

Last change on this file since 1478 was 1478, checked in by vboxsync, 18 years ago.

Support VME in guests. (v86 extensions)

  • Property svn:eol-style set to native
File size: 196.9 KB
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#include <signal.h>
26#include <assert.h>
27
28#include "cpu.h"
29#include "exec-all.h"
30#include "disas.h"
31
/* XXX: move that elsewhere */
/* Current write positions in the micro-op stream and its parameter
   stream; advanced while translating one translation block. */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;

/* Instruction prefix bits, OR-ed into DisasContext.prefix while decoding. */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
/* Long-mode helpers: on x86-64 these read the decoder state ... */
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
/* ... while on 32-bit targets they collapse to constants so the 64-bit
   table slots become NULL and the REX tests compile away. */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
/* Non-zero when a REX prefix is active, i.e. SPL/BPL/SIL/DIL are
   addressable instead of AH/CH/DH/BH (see the *_wrapper functions). */
static int x86_64_hregs;
#endif

#ifdef USE_DIRECT_JUMP
#define TBPARAM(x)
#else
#define TBPARAM(x) (long)(x)
#endif
69
70#ifdef VBOX
71/* Special/override code readers to hide patched code. */
72
73uint8_t ldub_code_raw(target_ulong pc)
74{
75 uint8_t b;
76
77 if (!remR3GetOpcode(cpu_single_env, pc, &b))
78 b = ldub_code(pc);
79 return b;
80}
81#define ldub_code(a) ldub_code_raw(a)
82
83uint16_t lduw_code_raw(target_ulong pc)
84{
85 return (ldub_code(pc+1) << 8) | ldub_code(pc);
86}
87#define lduw_code(a) lduw_code_raw(a)
88
89
90uint32_t ldl_code_raw(target_ulong pc)
91{
92 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
93}
94#define ldl_code(a) ldl_code_raw(a)
95
96#endif /* VBOX */
97
98
/* Per-translation decoder state: per-instruction fields are reset for
   every decoded instruction, per-block fields are fixed for the whole
   translation block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* OR of PREFIX_* bits seen on this instruction */
    int aflag, dflag; /* address/operand size (0=16, 1=32, 2=64 bit) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* decoded REX.X / REX.B prefix bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* EFLAGS I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* guest CPUID feature bits (EDX) */
    int cpuid_ext_features; /* guest CPUID extended feature bits (ECX) */
} DisasContext;
137
/* Forward declarations for end-of-block / jump emission helpers. */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
/* Order matches the x86 /r group encoding of the 0x80..0x83 opcodes. */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
/* Order matches the x86 /r group encoding of the 0xc0/0xd0 opcodes. */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* Build the INDEX_op_* micro-op enum from the generated opcode list. */
enum {
#define DEF(s, n, copy_size) INDEX_op_ ## s,
#include "opc.h"
#undef DEF
    NB_OPS,
};

#include "gen-op.h"

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

/* Operand register numbering used by the decoder; the first eight match
   the hardware register encoding. */
enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
198
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

/* Expand one table row per integer register (16 regs on x86-64) by
   token-pasting prefix/suffix around the register name. */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

/* Byte-register ambiguity: encodings 4..7 mean SPL/BPL/SIL/DIL when a
   REX prefix is present (x86_64_hregs != 0) but AH/CH/DH/BH otherwise.
   These wrappers pick the right generated op at run time. */
#define DEF_BREGS(prefixb, prefixh, suffix)      \
                                                 \
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
{                                                \
    if (x86_64_hregs)                            \
        prefixb ## ESP ## suffix ();             \
    else                                         \
        prefixh ## EAX ## suffix ();             \
}                                                \
                                                 \
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
{                                                \
    if (x86_64_hregs)                            \
        prefixb ## EBP ## suffix ();             \
    else                                         \
        prefixh ## ECX ## suffix ();             \
}                                                \
                                                 \
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
{                                                \
    if (x86_64_hregs)                            \
        prefixb ## ESI ## suffix ();             \
    else                                         \
        prefixh ## EDX ## suffix ();             \
}                                                \
                                                 \
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
{                                                \
    if (x86_64_hregs)                            \
        prefixb ## EDI ## suffix ();             \
    else                                         \
        prefixh ## EBX ## suffix ();             \
}

DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

/* 32-bit build: only the eight classic registers exist. */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
275
/* Store T0 into a general register, indexed by [operand size][reg].
   The OT_BYTE row is spelled out because of the high-byte wrappers. */
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T0,
        gen_op_movb_ECX_T0,
        gen_op_movb_EDX_T0,
        gen_op_movb_EBX_T0,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T0_wrapper,
        gen_op_movb_EBP_T0_wrapper,
        gen_op_movb_ESI_T0_wrapper,
        gen_op_movb_EDI_T0_wrapper,
        gen_op_movb_R8_T0,
        gen_op_movb_R9_T0,
        gen_op_movb_R10_T0,
        gen_op_movb_R11_T0,
        gen_op_movb_R12_T0,
        gen_op_movb_R13_T0,
        gen_op_movb_R14_T0,
        gen_op_movb_R15_T0,
#else
        /* 32-bit: encodings 4..7 are always AH/CH/DH/BH */
        gen_op_movh_EAX_T0,
        gen_op_movh_ECX_T0,
        gen_op_movh_EDX_T0,
        gen_op_movh_EBX_T0,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T0)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T0)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T0)
    },
#endif
};
314
/* Store T1 into a general register; same layout as gen_op_mov_reg_T0. */
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T1,
        gen_op_movb_ECX_T1,
        gen_op_movb_EDX_T1,
        gen_op_movb_EBX_T1,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T1_wrapper,
        gen_op_movb_EBP_T1_wrapper,
        gen_op_movb_ESI_T1_wrapper,
        gen_op_movb_EDI_T1_wrapper,
        gen_op_movb_R8_T1,
        gen_op_movb_R9_T1,
        gen_op_movb_R10_T1,
        gen_op_movb_R11_T1,
        gen_op_movb_R12_T1,
        gen_op_movb_R13_T1,
        gen_op_movb_R14_T1,
        gen_op_movb_R15_T1,
#else
        /* 32-bit: encodings 4..7 are always AH/CH/DH/BH */
        gen_op_movh_EAX_T1,
        gen_op_movh_ECX_T1,
        gen_op_movh_EDX_T1,
        gen_op_movh_EBX_T1,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T1)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T1)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T1)
    },
#endif
};

/* Store A0 into a general register; indexed by [size - OT_WORD][reg]
   since there is no byte-sized address write. */
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_movw_, _A0)
    },
    [1] = {
        DEF_REGS(gen_op_movl_, _A0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_movq_, _A0)
    },
#endif
};
367
/* Load a general register into T0 or T1, indexed by
   [operand size][T register 0/1][reg].  The word/long/quad rows all use
   the movl ops (presumably those copy the full register; the masking
   happens on the store side — TODO confirm against gen-op.h). */
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
{
    [OT_BYTE] = {
        {
            gen_op_movl_T0_EAX,
            gen_op_movl_T0_ECX,
            gen_op_movl_T0_EDX,
            gen_op_movl_T0_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T0_ESP_wrapper,
            gen_op_movl_T0_EBP_wrapper,
            gen_op_movl_T0_ESI_wrapper,
            gen_op_movl_T0_EDI_wrapper,
            gen_op_movl_T0_R8,
            gen_op_movl_T0_R9,
            gen_op_movl_T0_R10,
            gen_op_movl_T0_R11,
            gen_op_movl_T0_R12,
            gen_op_movl_T0_R13,
            gen_op_movl_T0_R14,
            gen_op_movl_T0_R15,
#else
            gen_op_movh_T0_EAX,
            gen_op_movh_T0_ECX,
            gen_op_movh_T0_EDX,
            gen_op_movh_T0_EBX,
#endif
        },
        {
            gen_op_movl_T1_EAX,
            gen_op_movl_T1_ECX,
            gen_op_movl_T1_EDX,
            gen_op_movl_T1_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T1_ESP_wrapper,
            gen_op_movl_T1_EBP_wrapper,
            gen_op_movl_T1_ESI_wrapper,
            gen_op_movl_T1_EDI_wrapper,
            gen_op_movl_T1_R8,
            gen_op_movl_T1_R9,
            gen_op_movl_T1_R10,
            gen_op_movl_T1_R11,
            gen_op_movl_T1_R12,
            gen_op_movl_T1_R13,
            gen_op_movl_T1_R14,
            gen_op_movl_T1_R15,
#else
            gen_op_movh_T1_EAX,
            gen_op_movh_T1_ECX,
            gen_op_movh_T1_EDX,
            gen_op_movh_T1_EBX,
#endif
        },
    },
    [OT_WORD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
    [OT_LONG] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#endif
};
449
/* Load a register into the address temporary A0 (32-bit form). */
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movl_A0_, )
};

/* A0 += reg << scale, for SIB-style address computation (32-bit);
   indexed by [scale][reg]. */
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addl_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addl_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addl_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addl_A0_, _s3)
    },
};

#ifdef TARGET_X86_64
/* 64-bit counterparts of the two tables above. */
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movq_A0_, )
};

static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addq_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addq_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addq_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addq_A0_, _s3)
    },
};
#endif

/* CMOVcc: move T1 into a register if T0 is non-zero; indexed by
   [size - OT_WORD][reg] (no byte-sized cmov exists). */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
503
/* Flag-independent logic ops, indexed by OP_*; the NULL slots (add,
   adc, sbb, sub, cmp) are handled separately in gen_op() because they
   need the input carry or size-specific cc handling. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};

/* Carry-using ops (ADC/SBB) for one memory-access suffix: four operand
   sizes x {adc, sbb}. */
#define DEF_ARITHC(SUFFIX)\
  {\
   gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
   gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
   gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
   gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
   gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
   gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
   X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
  },

/* Register-destination variant. */
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

/* Memory-destination variant, indexed by [size + mem_index][op]:
   mem_index selects the raw/kernel/user access functions. */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};
544
/* cc_op value (byte size) produced by each OP_* arithmetic operation;
   add the operand size to get the actual CC_OP_* constant. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,   /* ADC */
    CC_OP_SUBB,   /* SBB */
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,   /* CMP */
};

/* CMPXCHG ops for one memory-access suffix, one entry per operand size. */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

/* Register-destination variant. */
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

/* Memory-destination variant, indexed by size + mem_index. */
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
573
/* Shift/rotate ops for one memory-access suffix: four operand sizes x
   the eight OP_ROL..OP_SAR encodings.  Note slot 6 (the undocumented
   OP_SHL1) reuses the shl op. */
#define DEF_SHIFT(SUFFIX)\
  {\
   gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
   gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
   gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
   gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
   gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
   gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
   gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
   gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
   gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
   gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
   gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
   gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
   gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
   gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
   gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
   gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
   gen_op_roll ## SUFFIX ## _T0_T1_cc,\
   gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
   gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
   gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
   gen_op_shll ## SUFFIX ## _T0_T1_cc,\
   gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
   gen_op_shll ## SUFFIX ## _T0_T1_cc,\
   gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
   X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
   X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
  },

/* Register-destination shifts. */
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

/* Memory-destination shifts, indexed by [size + mem_index][op]. */
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
627
/* Double-precision shifts (SHLD/SHRD) for one memory suffix and one
   count source ('im' immediate or ECX); no byte form exists, hence the
   NULL first row. */
#define DEF_SHIFTD(SUFFIX, op)\
  {\
   NULL,\
   NULL,\
  },\
  {\
   gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
   gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
  },\
  {\
   gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
   gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
  },\
  {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
  },

/* Register destination, immediate count. */
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

/* Register destination, CL count. */
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

/* Memory destination, immediate count; indexed by size + mem_index. */
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

/* Memory destination, CL count; indexed by size + mem_index. */
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
669
/* Bit-test ops (BT/BTS/BTR/BTC), indexed by [size - OT_WORD][op]. */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* Advance A0 by the word offset of bit index T1 (memory bit tests). */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* BSF/BSR, indexed by [size - OT_WORD][0=bsf, 1=bsr]. */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
715
/* Sign-extending loads into T0, indexed by size + mem_index.  Only the
   byte/word forms exist on all targets; the 32-bit sign extension is
   only meaningful (and only present) on x86-64. */
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
    gen_op_ldsb_raw_T0_A0,
    gen_op_ldsw_raw_T0_A0,
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
    NULL,
#ifndef CONFIG_USER_ONLY
    gen_op_ldsb_kernel_T0_A0,
    gen_op_ldsw_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
    NULL,

    gen_op_ldsb_user_T0_A0,
    gen_op_ldsw_user_T0_A0,
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
    NULL,
#endif
};

/* Zero-extending loads into T0 (byte/word only), same indexing. */
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    NULL,
    NULL,

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    NULL,
    NULL,

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    NULL,
    NULL,
#endif
};
752
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Plain loads into T0, indexed by size + mem_index. */
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    gen_op_ldl_raw_T0_A0,
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    gen_op_ldl_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    gen_op_ldl_user_T0_A0,
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
#endif
};

/* Plain loads into T1, same indexing. */
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
    gen_op_ldub_raw_T1_A0,
    gen_op_lduw_raw_T1_A0,
    gen_op_ldl_raw_T1_A0,
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T1_A0,
    gen_op_lduw_kernel_T1_A0,
    gen_op_ldl_kernel_T1_A0,
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),

    gen_op_ldub_user_T1_A0,
    gen_op_lduw_user_T1_A0,
    gen_op_ldl_user_T1_A0,
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
#endif
};

/* Stores of T0, same indexing. */
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
    gen_op_stb_raw_T0_A0,
    gen_op_stw_raw_T0_A0,
    gen_op_stl_raw_T0_A0,
    X86_64_ONLY(gen_op_stq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_stb_kernel_T0_A0,
    gen_op_stw_kernel_T0_A0,
    gen_op_stl_kernel_T0_A0,
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),

    gen_op_stb_user_T0_A0,
    gen_op_stw_user_T0_A0,
    gen_op_stl_user_T0_A0,
    X86_64_ONLY(gen_op_stq_user_T0_A0),
#endif
};

/* Stores of T1 (no byte form is generated), same indexing. */
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
    NULL,
    gen_op_stw_raw_T1_A0,
    gen_op_stl_raw_T1_A0,
    X86_64_ONLY(gen_op_stq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    NULL,
    gen_op_stw_kernel_T1_A0,
    gen_op_stl_kernel_T1_A0,
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),

    NULL,
    gen_op_stw_user_T1_A0,
    gen_op_stl_user_T1_A0,
    X86_64_ONLY(gen_op_stq_user_T1_A0),
#endif
};
829
#ifdef VBOX
/* Emit the micro-op that polls for pending external VBox events so the
   recompiler can be forced out of the current TB; invoked on every EIP
   update (see gen_jmp_im).
   Note: declared (void) — the original empty () is a pre-ANSI
   unprototyped declaration, not a prototype, in C. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
#endif /* VBOX */
836
/* Emit ops that load the immediate PC into env->eip.  On x86-64 the
   smallest encoding is chosen: zero-extended 32-bit, sign-extended
   32-bit, or a full 64-bit immediate split into two halves. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    /* VBox: poll for external events at every EIP update. */
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        gen_op_movq_eip_im(pc);
    } else {
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
854
/* Compute the source address of a string instruction into A0:
   segment base (honouring a segment override) + ESI, truncated per the
   current address size. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment bases are zero unless overridden. */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;    /* non-flat segments: must add a base */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();  /* wrap the offset to 16 bits */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
889
/* Compute the destination address of a string instruction into A0:
   always ES:EDI — segment overrides do not apply to the destination. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();  /* wrap the offset to 16 bits */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
910
/* Load +element_size or -element_size (per EFLAGS.DF) into T0, used to
   step ESI/EDI in string instructions; indexed by operand size. */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* Conditional branches on (E/R)CX != 0, indexed by address size. */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

/* Conditional branches on (E/R)CX == 0, indexed by address size. */
static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* Decrement (E/R)CX, indexed by address size. */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* Repeat-termination branches for REPZ/REPNZ CMPS/SCAS, indexed by
   [nz][operand size]: row 0 jumps while ZF==0 (repz), row 1 while
   ZF!=0 (repnz). */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

/* Port I/O and I/O-permission-check ops, indexed by operand size. */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
986
/* Emit a TSS I/O-permission-bitmap check when required: in protected
   mode with CPL > IOPL, or in vm86 mode (where IOPL never grants
   access).  The EIP is synced first so a #GP reports the right
   instruction.  use_dx selects the DX-port variant over the T0 port. */
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
999
/* Emit one MOVS iteration: load from [ESI], store to ES:[EDI], then
   step both index registers by the DF-directed element size. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();    /* T0 = +/- element size per DF */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1021
1022static inline void gen_update_cc_op(DisasContext *s)
1023{
1024 if (s->cc_op != CC_OP_DYNAMIC) {
1025 gen_op_set_cc_op(s->cc_op);
1026 s->cc_op = CC_OP_DYNAMIC;
1027 }
1028}
1029
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the CX==0 early exit of a REP prefix: if (E/R)CX is zero, jump
   (via label l2) to the code that continues at next_eip.  Returns l2 so
   the loop body can also branch there to terminate the repeat. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);   /* CX != 0: fall into the body */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);     /* CX == 0: skip the instruction */
    gen_set_label(l1);
    return l2;
}
1044
/* Emit one STOS iteration: store AL/AX/EAX/RAX to ES:[EDI], then step
   EDI by the DF-directed element size. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1062
/* Emit one LODS iteration: load from [ESI] into AL/AX/EAX/RAX, then
   step ESI by the DF-directed element size. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1080
/* Emit one SCAS iteration: compare accumulator with ES:[EDI] (setting
   the arithmetic flags), then step EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1099
/* Emit one CMPS iteration: compare [ESI] with ES:[EDI] (setting the
   arithmetic flags), then step both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1122
/* Emit one INS iteration: read from port DX, store to ES:[EDI], step
   EDI.  A dummy zero is stored first so any page fault is taken BEFORE
   the port read — the I/O must not be performed twice on a fault. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();   /* probe the write address */
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();   /* store the real value */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1142
/* Emit one OUTS iteration: load from [ESI], write to port DX, step ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1160
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Define gen_repz_<op>(): wrap one string-op iteration in the REP loop
   protocol — test CX, run the body, decrement CX, then jump back to
   cur_eip so the next iteration is re-entered as a fresh instruction
   (interruptible and single-steppable). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* As GEN_REPZ, but for CMPS/SCAS which also terminate on the ZF test
   selected by nz (0 = repz, 1 = repnz). */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1204
/* Condition-code kinds, in x86 Jcc encoding order (bit 0 of the opcode
   negates the condition, handled by the caller). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* Fast conditional jumps evaluated directly from a pending SUB result,
   indexed by [operand size][JCC_*].  NULL slots (O, P) have no fast
   form and fall back to the slow flag computation. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
/* LOOPNZ/LOOPZ/LOOP-family branch ops, indexed by [address size][kind];
   the fourth column (JCXZ) is left NULL here — presumably handled via
   gen_op_jz_ecx by the decoder (outside this chunk). */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1279
/* SETcc via full flag computation, indexed by JCC_*. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};

/* Fast SETcc evaluated from a pending SUB result, indexed by
   [operand size][JCC_*]; NULL slots (O, P) use gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1335
/* x87 arithmetic on ST0 with FT0, indexed by the 3-bit /r field of the
   FPU opcode (slots 2/3 are both FCOM/FCOMP compare forms). */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
/* x87 arithmetic on ST(i) with ST0: for the STN-destination forms the
   sub/subr and div/divr encodings are swapped relative to the table
   above, as the note says. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1358
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU operation 'op' (OP_* index) of size 'ot' on destination
   'd' with the second operand already in T1.  Updates the lazily
   tracked cc state in s1->cc_op; for the memory destination, the flag
   update is sequenced after the store so a store fault leaves the
   flags untouched (precise exceptions). */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* Carry-using ops need the current flags materialised first. */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;  /* the op computed the flags itself */
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();     /* compare updates cc directly */
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {            /* CMP never writes its destination */
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1418
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC (c <= 0) of size 'ot' on register 'd' or on
   memory at A0.  Flags must be materialized first because INC/DEC
   preserve CF from the previous operation. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    gen_op_update_inc_cc();
}
1441
/* Emit a shift/rotate 'op' of size 'ot' on destination 'd' (register,
   or memory via A0 when OR_TMP0) by the count in register 's' (or in
   T1 when OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1462
/* Shift/rotate by an immediate count 'c': loads the constant into T1
   and reuses the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1469
/* Decode the memory-operand part of a ModRM byte (plus optional SIB and
   displacement) and emit micro-ops that leave the effective address in
   A0, with the segment base added when required.  Advances s->pc past
   the consumed bytes.  *reg_ptr/*offset_ptr always return OR_A0/0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 only; RIP-relative in 64-bit
                   mode when there is no SIB byte */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            /* NOTE(review): unlike the mod==0/base==5 case above, disp is
               not cast to int32_t here; in 64-bit mode a negative disp32
               would not be sign-extended — confirm against upstream. */
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addressing defaults to SS, else DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing: fixed base/index register combinations */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* rm == 6 means disp16 absolute, no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();   /* 16-bit wraparound of the offset */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1659
1660/* used for LEA and MOV AX, mem */
1661static void gen_add_A0_ds_seg(DisasContext *s)
1662{
1663 int override, must_add_seg;
1664 must_add_seg = s->addseg;
1665 override = R_DS;
1666 if (s->override >= 0) {
1667 override = s->override;
1668 must_add_seg = 1;
1669 } else {
1670 override = R_DS;
1671 }
1672 if (must_add_seg) {
1673#ifdef TARGET_X86_64
1674 if (CODE64(s)) {
1675 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1676 } else
1677#endif
1678 {
1679 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1680 }
1681 }
1682}
1683
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For mod == 3 the operand is a register and a plain register move is
   emitted; otherwise the effective address is computed via
   gen_lea_modrm and a memory load/store through A0 is emitted. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1715
1716static inline uint32_t insn_get(DisasContext *s, int ot)
1717{
1718 uint32_t ret;
1719
1720 switch(ot) {
1721 case OT_BYTE:
1722 ret = ldub_code(s->pc);
1723 s->pc++;
1724 break;
1725 case OT_WORD:
1726 ret = lduw_code(s->pc);
1727 s->pc += 2;
1728 break;
1729 default:
1730 case OT_LONG:
1731 ret = ldl_code(s->pc);
1732 s->pc += 4;
1733 break;
1734 }
1735 return ret;
1736}
1737
1738static inline int insn_const_size(unsigned int ot)
1739{
1740 if (ot <= OT_LONG)
1741 return 1 << ot;
1742 else
1743 return 4;
1744}
1745
/* Emit a jump to 'eip' using direct TB chaining slot 'tb_num' when the
   target lies on one of the pages this TB already spans; otherwise emit
   a plain end-of-block jump. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) so the main loop can patch the chain */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1770
/* Emit a conditional jump for Jcc opcode bits 'b': taken target 'val',
   fall-through 'next_eip'.  When TB chaining is allowed (s->jmp_opt) a
   fast condition helper matched to the current lazy CC state is used
   where possible; otherwise the condition is evaluated the slow way and
   the block is ended. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;                 /* odd opcodes are the inverted condition */
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only ZF and SF can be tested cheaply; the
               "% 4" folds the op down to its operand size */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast helper: compute the condition into T0 and jump on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap the two continuation targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: evaluate the condition, set EIP, end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1902
/* Emit SETcc for opcode bits 'b': leaves 0/1 in T0, using a fast helper
   matched to the current lazy CC state when possible and inverting the
   result for odd opcodes. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF have cheap helpers; "% 4" folds to operand size */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* flags must be materialized before the generic helper */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1969
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so EIP must be exact */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple base = selector << 4 load */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1992
/* Adjust ESP/RSP by 'addend', using the stack width selected by
   CODE64/ss32; common addends have dedicated micro-ops. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        /* 16-bit stack: only SP is modified */
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}
2019
/* generate a push. It depends on ss32, addseg and dflag */
/* Pushes T0: decrements the stack pointer by the operand size, stores
   T0 at the new top of stack (adding the SS base when needed), then
   updates ESP.  ESP is only written after the store so a faulting
   store leaves it unchanged (precise exceptions). */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented offset in T1 for the ESP update */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2059
2060/* generate a push. It depends on ss32, addseg and dflag */
2061/* slower version for T1, only used for call Ev */
2062static void gen_push_T1(DisasContext *s)
2063{
2064#ifdef TARGET_X86_64
2065 if (CODE64(s)) {
2066 gen_op_movq_A0_reg[R_ESP]();
2067 if (s->dflag) {
2068 gen_op_subq_A0_8();
2069 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2070 } else {
2071 gen_op_subq_A0_2();
2072 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2073 }
2074 gen_op_movq_ESP_A0();
2075 } else
2076#endif
2077 {
2078 gen_op_movl_A0_reg[R_ESP]();
2079 if (!s->dflag)
2080 gen_op_subl_A0_2();
2081 else
2082 gen_op_subl_A0_4();
2083 if (s->ss32) {
2084 if (s->addseg) {
2085 gen_op_addl_A0_SS();
2086 }
2087 } else {
2088 gen_op_andl_A0_ffff();
2089 gen_op_addl_A0_SS();
2090 }
2091 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2092
2093 if (s->ss32 && !s->addseg)
2094 gen_op_movl_ESP_A0();
2095 else
2096 gen_stack_update(s, (-2) << s->dflag);
2097 }
2098}
2099
/* two step pop is necessary for precise exceptions */
/* Step 1 of POP: load the value at the top of stack into T0 without
   touching ESP; gen_pop_update() performs the ESP adjustment. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2121
/* Step 2 of POP: bump the stack pointer by the operand size (8 or 2 in
   64-bit mode, 2 or 4 otherwise). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2133
/* Load the current top-of-stack address into A0 (segmented when addseg)
   and keep the unsegmented offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2143
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store EAX..EDI (in reverse order) below the current
   stack pointer, then write the new ESP kept in T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);  /* room for 8 registers */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2162
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload EDI..EAX from the stack (skipping the saved ESP),
   then write the new ESP kept in T1. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);   /* final ESP after 8 pops */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2184
/* ENTER: allocate 'esp_addend' bytes of locals plus a 'level'-deep
   (0..31) display of saved frame pointers; pushes EBP, sets EBP to the
   new frame base and lowers ESP accordingly. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;               /* architectural: level is taken mod 32 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2232
/* Raise exception 'trapno' at guest address 'cur_eip': flags are
   materialized and EIP set first so the exception state is precise;
   translation of this block then stops. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2241
/* an interrupt is different from an exception because of the
   privilege checks */
/* Emit a software interrupt 'intno' (e.g. INT n): the instruction
   length (next_eip - cur_eip) is passed so the handler can compute the
   return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2253
/* Stop execution at 'cur_eip' and drop to the debugger (breakpoint
   handling); ends the block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2262
/* generate a generic end of block. Trace exception is also generated
   if needed */
/* Materializes lazy flags, clears the interrupt-inhibit state if set,
   then exits: to the debugger, with a single-step trap (TF), or back
   to the main loop with T0 = 0 (no chained TB). */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2282
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
/* Uses direct TB chaining slot 'tb_num' when optimization is allowed,
   otherwise a plain EIP update followed by end-of-block. */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2302
/* Convenience wrapper: jump to 'eip' via TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2307
/* Load the target_ulong immediate 'val' into T0; on x86-64 a 32-bit
   sign-extendable value uses the shorter movl form. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2320
/* Load the target_ulong immediate 'val' into T1; on x86-64 a 32-bit
   sign-extendable value uses the shorter movl form. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2333
/* Add the immediate 'val' to A0, using the 64-bit add in long mode. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
    gen_op_addl_A0_im(val);
}
2343
/* 64-bit (q) and 128-bit (o) env<->memory transfer helpers, indexed by
   s->mem_index >> 2 (raw / kernel / user address space); each takes the
   CPUX86State offset of the destination/source field. */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2375
/* Sentinel marking table entries that need bespoke decoding in gen_sse() */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* Entry pair { MMX form, SSE form } for ops that exist in both ISAs */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Entry quad { ps, pd, ss, sd } for packed/scalar float ops */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2381
/* Main SSE/MMX dispatch table indexed by [second opcode byte][prefix]:
   column 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2.  NULL means
   illegal; SSE_SPECIAL means hand-decoded in gen_sse(). */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2504
/* Immediate shift group (opcodes 0x71/0x72/0x73), indexed by
   8 * (opcode - 0x71) + /reg field: word, dword, qword shifts; the
   128-bit byte shifts (psrldq/pslldq) exist only in the xmm column. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2517
/* Scalar int<->float conversions (cvtsi2ss group, cvtt*2si group,
   cvt*2si group), four entries per row: ss, sd, then the 64-bit
   variants which only exist on x86-64 builds. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2534
/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the imm8 comparison
   code (0 = eq .. 7 = ord); each row is the {ps, pd, ss, sd} quad. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2545
/* Translate one MMX/SSE/SSE2/SSE3 instruction.
 * 'b' is the opcode byte following 0x0f (the caller passes it with
 * 0x100 already folded in, hence the 'b &= 0xff' below); 'pc_start'
 * is the guest address of the instruction start, used when raising
 * exceptions; 'rex_r' is the pre-shifted REX.R bit to fold into the
 * modrm 'reg' field.
 * Dispatch: sse_op_table1[b][b1] yields either a generic two-operand
 * generator, or the SSE_SPECIAL marker in which case the instruction
 * is decoded by hand in the first big switch below. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* Map the mandatory prefix to the table column:
       0 = none (MMX / ps), 1 = 0x66 (pd), 2 = 0xf3 (ss), 3 = 0xf2 (sd). */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise #NM so the OS can lazily restore FPU state */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    /* NOTE: the illegal_op label lives inside the CR0.EM branch, so every
       'goto illegal_op' in this function raises #UD here. */
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* hand-decoded instructions: fold the prefix column into the
           opcode so one switch covers all prefix variants */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            /* NOTE(review): lddqu is architecturally a load (xmm <- mem),
               but it is grouped here with the non-temporal stores and
               emitted via gen_sto_env_A0 -- verify against the op
               helpers / upstream before relying on it. */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
            gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x16e: /* movd xmm, ea */
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
            gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper 96 bits */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper 64 bits */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
            gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            break;
        case 0x17e: /* movd ea, xmm */
            gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* high quadword of the destination is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* store forms have no register encoding */
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* immediate shift count is materialized in xmm_t0/mmx_t0 and
               passed as the second operand to the shift helper */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* sd source: 64-bit load */
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating group, (b & 1) * 4 the rounding one */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[reg & 7].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        /* pre-decode fixups that must happen before operand setup */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* trailing imm8: needed for RIP-relative addressing fixup */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        /* compute env offsets of the two operands (loading a memory
           source into xmm_t0/mmx_t0 if needed) */
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            /* table1 stores the 3-operand generator under the 2-operand
               pointer type; cast back before calling with the imm8 */
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* (u)comiss/(u)comisd write EFLAGS directly */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3111
3112
3113/* convert one instruction. s->is_jmp is set if the translation must
3114 be stopped. Return the next pc value */
3115static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3116{
3117 int b, prefixes, aflag, dflag;
3118 int shift, ot;
3119 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3120 target_ulong next_eip, tval;
3121 int rex_w, rex_r;
3122
3123 s->pc = pc_start;
3124 prefixes = 0;
3125 aflag = s->code32;
3126 dflag = s->code32;
3127 s->override = -1;
3128 rex_w = -1;
3129 rex_r = 0;
3130#ifdef TARGET_X86_64
3131 s->rex_x = 0;
3132 s->rex_b = 0;
3133 x86_64_hregs = 0;
3134#endif
3135 s->rip_offset = 0; /* for relative ip address */
3136 next_byte:
3137 b = ldub_code(s->pc);
3138 s->pc++;
3139 /* check prefixes */
3140#ifdef TARGET_X86_64
3141 if (CODE64(s)) {
3142 switch (b) {
3143 case 0xf3:
3144 prefixes |= PREFIX_REPZ;
3145 goto next_byte;
3146 case 0xf2:
3147 prefixes |= PREFIX_REPNZ;
3148 goto next_byte;
3149 case 0xf0:
3150 prefixes |= PREFIX_LOCK;
3151 goto next_byte;
3152 case 0x2e:
3153 s->override = R_CS;
3154 goto next_byte;
3155 case 0x36:
3156 s->override = R_SS;
3157 goto next_byte;
3158 case 0x3e:
3159 s->override = R_DS;
3160 goto next_byte;
3161 case 0x26:
3162 s->override = R_ES;
3163 goto next_byte;
3164 case 0x64:
3165 s->override = R_FS;
3166 goto next_byte;
3167 case 0x65:
3168 s->override = R_GS;
3169 goto next_byte;
3170 case 0x66:
3171 prefixes |= PREFIX_DATA;
3172 goto next_byte;
3173 case 0x67:
3174 prefixes |= PREFIX_ADR;
3175 goto next_byte;
3176 case 0x40 ... 0x4f:
3177 /* REX prefix */
3178 rex_w = (b >> 3) & 1;
3179 rex_r = (b & 0x4) << 1;
3180 s->rex_x = (b & 0x2) << 2;
3181 REX_B(s) = (b & 0x1) << 3;
3182 x86_64_hregs = 1; /* select uniform byte register addressing */
3183 goto next_byte;
3184 }
3185 if (rex_w == 1) {
3186 /* 0x66 is ignored if rex.w is set */
3187 dflag = 2;
3188 } else {
3189 if (prefixes & PREFIX_DATA)
3190 dflag ^= 1;
3191 }
3192 if (!(prefixes & PREFIX_ADR))
3193 aflag = 2;
3194 } else
3195#endif
3196 {
3197 switch (b) {
3198 case 0xf3:
3199 prefixes |= PREFIX_REPZ;
3200 goto next_byte;
3201 case 0xf2:
3202 prefixes |= PREFIX_REPNZ;
3203 goto next_byte;
3204 case 0xf0:
3205 prefixes |= PREFIX_LOCK;
3206 goto next_byte;
3207 case 0x2e:
3208 s->override = R_CS;
3209 goto next_byte;
3210 case 0x36:
3211 s->override = R_SS;
3212 goto next_byte;
3213 case 0x3e:
3214 s->override = R_DS;
3215 goto next_byte;
3216 case 0x26:
3217 s->override = R_ES;
3218 goto next_byte;
3219 case 0x64:
3220 s->override = R_FS;
3221 goto next_byte;
3222 case 0x65:
3223 s->override = R_GS;
3224 goto next_byte;
3225 case 0x66:
3226 prefixes |= PREFIX_DATA;
3227 goto next_byte;
3228 case 0x67:
3229 prefixes |= PREFIX_ADR;
3230 goto next_byte;
3231 }
3232 if (prefixes & PREFIX_DATA)
3233 dflag ^= 1;
3234 if (prefixes & PREFIX_ADR)
3235 aflag ^= 1;
3236 }
3237
3238 s->prefix = prefixes;
3239 s->aflag = aflag;
3240 s->dflag = dflag;
3241
3242 /* lock generation */
3243 if (prefixes & PREFIX_LOCK)
3244 gen_op_lock();
3245
3246 /* now check op code */
3247 reswitch:
3248 switch(b) {
3249 case 0x0f:
3250 /**************************/
3251 /* extended op code */
3252 b = ldub_code(s->pc++) | 0x100;
3253 goto reswitch;
3254
3255 /**************************/
3256 /* arith & logic */
3257 case 0x00 ... 0x05:
3258 case 0x08 ... 0x0d:
3259 case 0x10 ... 0x15:
3260 case 0x18 ... 0x1d:
3261 case 0x20 ... 0x25:
3262 case 0x28 ... 0x2d:
3263 case 0x30 ... 0x35:
3264 case 0x38 ... 0x3d:
3265 {
3266 int op, f, val;
3267 op = (b >> 3) & 7;
3268 f = (b >> 1) & 3;
3269
3270 if ((b & 1) == 0)
3271 ot = OT_BYTE;
3272 else
3273 ot = dflag + OT_WORD;
3274
3275 switch(f) {
3276 case 0: /* OP Ev, Gv */
3277 modrm = ldub_code(s->pc++);
3278 reg = ((modrm >> 3) & 7) | rex_r;
3279 mod = (modrm >> 6) & 3;
3280 rm = (modrm & 7) | REX_B(s);
3281 if (mod != 3) {
3282 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3283 opreg = OR_TMP0;
3284 } else if (op == OP_XORL && rm == reg) {
3285 xor_zero:
3286 /* xor reg, reg optimisation */
3287 gen_op_movl_T0_0();
3288 s->cc_op = CC_OP_LOGICB + ot;
3289 gen_op_mov_reg_T0[ot][reg]();
3290 gen_op_update1_cc();
3291 break;
3292 } else {
3293 opreg = rm;
3294 }
3295 gen_op_mov_TN_reg[ot][1][reg]();
3296 gen_op(s, op, ot, opreg);
3297 break;
3298 case 1: /* OP Gv, Ev */
3299 modrm = ldub_code(s->pc++);
3300 mod = (modrm >> 6) & 3;
3301 reg = ((modrm >> 3) & 7) | rex_r;
3302 rm = (modrm & 7) | REX_B(s);
3303 if (mod != 3) {
3304 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3305 gen_op_ld_T1_A0[ot + s->mem_index]();
3306 } else if (op == OP_XORL && rm == reg) {
3307 goto xor_zero;
3308 } else {
3309 gen_op_mov_TN_reg[ot][1][rm]();
3310 }
3311 gen_op(s, op, ot, reg);
3312 break;
3313 case 2: /* OP A, Iv */
3314 val = insn_get(s, ot);
3315 gen_op_movl_T1_im(val);
3316 gen_op(s, op, ot, OR_EAX);
3317 break;
3318 }
3319 }
3320 break;
3321
3322 case 0x80: /* GRP1 */
3323 case 0x81:
3324 case 0x82:
3325 case 0x83:
3326 {
3327 int val;
3328
3329 if ((b & 1) == 0)
3330 ot = OT_BYTE;
3331 else
3332 ot = dflag + OT_WORD;
3333
3334 modrm = ldub_code(s->pc++);
3335 mod = (modrm >> 6) & 3;
3336 rm = (modrm & 7) | REX_B(s);
3337 op = (modrm >> 3) & 7;
3338
3339 if (mod != 3) {
3340 if (b == 0x83)
3341 s->rip_offset = 1;
3342 else
3343 s->rip_offset = insn_const_size(ot);
3344 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3345 opreg = OR_TMP0;
3346 } else {
3347 opreg = rm;
3348 }
3349
3350 switch(b) {
3351 default:
3352 case 0x80:
3353 case 0x81:
3354 case 0x82:
3355 val = insn_get(s, ot);
3356 break;
3357 case 0x83:
3358 val = (int8_t)insn_get(s, OT_BYTE);
3359 break;
3360 }
3361 gen_op_movl_T1_im(val);
3362 gen_op(s, op, ot, opreg);
3363 }
3364 break;
3365
3366 /**************************/
3367 /* inc, dec, and other misc arith */
3368 case 0x40 ... 0x47: /* inc Gv */
3369 ot = dflag ? OT_LONG : OT_WORD;
3370 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3371 break;
3372 case 0x48 ... 0x4f: /* dec Gv */
3373 ot = dflag ? OT_LONG : OT_WORD;
3374 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3375 break;
3376 case 0xf6: /* GRP3 */
3377 case 0xf7:
3378 if ((b & 1) == 0)
3379 ot = OT_BYTE;
3380 else
3381 ot = dflag + OT_WORD;
3382
3383 modrm = ldub_code(s->pc++);
3384 mod = (modrm >> 6) & 3;
3385 rm = (modrm & 7) | REX_B(s);
3386 op = (modrm >> 3) & 7;
3387 if (mod != 3) {
3388 if (op == 0)
3389 s->rip_offset = insn_const_size(ot);
3390 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3391 gen_op_ld_T0_A0[ot + s->mem_index]();
3392 } else {
3393 gen_op_mov_TN_reg[ot][0][rm]();
3394 }
3395
3396 switch(op) {
3397 case 0: /* test */
3398 val = insn_get(s, ot);
3399 gen_op_movl_T1_im(val);
3400 gen_op_testl_T0_T1_cc();
3401 s->cc_op = CC_OP_LOGICB + ot;
3402 break;
3403 case 2: /* not */
3404 gen_op_notl_T0();
3405 if (mod != 3) {
3406 gen_op_st_T0_A0[ot + s->mem_index]();
3407 } else {
3408 gen_op_mov_reg_T0[ot][rm]();
3409 }
3410 break;
3411 case 3: /* neg */
3412 gen_op_negl_T0();
3413 if (mod != 3) {
3414 gen_op_st_T0_A0[ot + s->mem_index]();
3415 } else {
3416 gen_op_mov_reg_T0[ot][rm]();
3417 }
3418 gen_op_update_neg_cc();
3419 s->cc_op = CC_OP_SUBB + ot;
3420 break;
3421 case 4: /* mul */
3422 switch(ot) {
3423 case OT_BYTE:
3424 gen_op_mulb_AL_T0();
3425 s->cc_op = CC_OP_MULB;
3426 break;
3427 case OT_WORD:
3428 gen_op_mulw_AX_T0();
3429 s->cc_op = CC_OP_MULW;
3430 break;
3431 default:
3432 case OT_LONG:
3433 gen_op_mull_EAX_T0();
3434 s->cc_op = CC_OP_MULL;
3435 break;
3436#ifdef TARGET_X86_64
3437 case OT_QUAD:
3438 gen_op_mulq_EAX_T0();
3439 s->cc_op = CC_OP_MULQ;
3440 break;
3441#endif
3442 }
3443 break;
3444 case 5: /* imul */
3445 switch(ot) {
3446 case OT_BYTE:
3447 gen_op_imulb_AL_T0();
3448 s->cc_op = CC_OP_MULB;
3449 break;
3450 case OT_WORD:
3451 gen_op_imulw_AX_T0();
3452 s->cc_op = CC_OP_MULW;
3453 break;
3454 default:
3455 case OT_LONG:
3456 gen_op_imull_EAX_T0();
3457 s->cc_op = CC_OP_MULL;
3458 break;
3459#ifdef TARGET_X86_64
3460 case OT_QUAD:
3461 gen_op_imulq_EAX_T0();
3462 s->cc_op = CC_OP_MULQ;
3463 break;
3464#endif
3465 }
3466 break;
3467 case 6: /* div */
3468 switch(ot) {
3469 case OT_BYTE:
3470 gen_jmp_im(pc_start - s->cs_base);
3471 gen_op_divb_AL_T0();
3472 break;
3473 case OT_WORD:
3474 gen_jmp_im(pc_start - s->cs_base);
3475 gen_op_divw_AX_T0();
3476 break;
3477 default:
3478 case OT_LONG:
3479 gen_jmp_im(pc_start - s->cs_base);
3480 gen_op_divl_EAX_T0();
3481 break;
3482#ifdef TARGET_X86_64
3483 case OT_QUAD:
3484 gen_jmp_im(pc_start - s->cs_base);
3485 gen_op_divq_EAX_T0();
3486 break;
3487#endif
3488 }
3489 break;
3490 case 7: /* idiv */
3491 switch(ot) {
3492 case OT_BYTE:
3493 gen_jmp_im(pc_start - s->cs_base);
3494 gen_op_idivb_AL_T0();
3495 break;
3496 case OT_WORD:
3497 gen_jmp_im(pc_start - s->cs_base);
3498 gen_op_idivw_AX_T0();
3499 break;
3500 default:
3501 case OT_LONG:
3502 gen_jmp_im(pc_start - s->cs_base);
3503 gen_op_idivl_EAX_T0();
3504 break;
3505#ifdef TARGET_X86_64
3506 case OT_QUAD:
3507 gen_jmp_im(pc_start - s->cs_base);
3508 gen_op_idivq_EAX_T0();
3509 break;
3510#endif
3511 }
3512 break;
3513 default:
3514 goto illegal_op;
3515 }
3516 break;
3517
3518 case 0xfe: /* GRP4 */
3519 case 0xff: /* GRP5 */
3520 if ((b & 1) == 0)
3521 ot = OT_BYTE;
3522 else
3523 ot = dflag + OT_WORD;
3524
3525 modrm = ldub_code(s->pc++);
3526 mod = (modrm >> 6) & 3;
3527 rm = (modrm & 7) | REX_B(s);
3528 op = (modrm >> 3) & 7;
3529 if (op >= 2 && b == 0xfe) {
3530 goto illegal_op;
3531 }
3532 if (CODE64(s)) {
3533 if (op == 2 || op == 4) {
3534 /* operand size for jumps is 64 bit */
3535 ot = OT_QUAD;
3536 } else if (op == 3 || op == 5) {
3537 /* for call calls, the operand is 16 or 32 bit, even
3538 in long mode */
3539 ot = dflag ? OT_LONG : OT_WORD;
3540 } else if (op == 6) {
3541 /* default push size is 64 bit */
3542 ot = dflag ? OT_QUAD : OT_WORD;
3543 }
3544 }
3545 if (mod != 3) {
3546 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3547 if (op >= 2 && op != 3 && op != 5)
3548 gen_op_ld_T0_A0[ot + s->mem_index]();
3549 } else {
3550 gen_op_mov_TN_reg[ot][0][rm]();
3551 }
3552
3553 switch(op) {
3554 case 0: /* inc Ev */
3555 if (mod != 3)
3556 opreg = OR_TMP0;
3557 else
3558 opreg = rm;
3559 gen_inc(s, ot, opreg, 1);
3560 break;
3561 case 1: /* dec Ev */
3562 if (mod != 3)
3563 opreg = OR_TMP0;
3564 else
3565 opreg = rm;
3566 gen_inc(s, ot, opreg, -1);
3567 break;
3568 case 2: /* call Ev */
3569 /* XXX: optimize if memory (no 'and' is necessary) */
3570 if (s->dflag == 0)
3571 gen_op_andl_T0_ffff();
3572 next_eip = s->pc - s->cs_base;
3573 gen_movtl_T1_im(next_eip);
3574 gen_push_T1(s);
3575 gen_op_jmp_T0();
3576 gen_eob(s);
3577 break;
3578 case 3: /* lcall Ev */
3579 gen_op_ld_T1_A0[ot + s->mem_index]();
3580 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3581 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3582 do_lcall:
3583 if (s->pe && !s->vm86) {
3584 if (s->cc_op != CC_OP_DYNAMIC)
3585 gen_op_set_cc_op(s->cc_op);
3586 gen_jmp_im(pc_start - s->cs_base);
3587 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3588 } else {
3589 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3590 }
3591 gen_eob(s);
3592 break;
3593 case 4: /* jmp Ev */
3594 if (s->dflag == 0)
3595 gen_op_andl_T0_ffff();
3596 gen_op_jmp_T0();
3597 gen_eob(s);
3598 break;
3599 case 5: /* ljmp Ev */
3600 gen_op_ld_T1_A0[ot + s->mem_index]();
3601 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3602 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3603 do_ljmp:
3604 if (s->pe && !s->vm86) {
3605 if (s->cc_op != CC_OP_DYNAMIC)
3606 gen_op_set_cc_op(s->cc_op);
3607 gen_jmp_im(pc_start - s->cs_base);
3608 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3609 } else {
3610 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3611 gen_op_movl_T0_T1();
3612 gen_op_jmp_T0();
3613 }
3614 gen_eob(s);
3615 break;
3616 case 6: /* push Ev */
3617 gen_push_T0(s);
3618 break;
3619 default:
3620 goto illegal_op;
3621 }
3622 break;
3623
3624 case 0x84: /* test Ev, Gv */
3625 case 0x85:
3626 if ((b & 1) == 0)
3627 ot = OT_BYTE;
3628 else
3629 ot = dflag + OT_WORD;
3630
3631 modrm = ldub_code(s->pc++);
3632 mod = (modrm >> 6) & 3;
3633 rm = (modrm & 7) | REX_B(s);
3634 reg = ((modrm >> 3) & 7) | rex_r;
3635
3636 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3637 gen_op_mov_TN_reg[ot][1][reg]();
3638 gen_op_testl_T0_T1_cc();
3639 s->cc_op = CC_OP_LOGICB + ot;
3640 break;
3641
3642 case 0xa8: /* test eAX, Iv */
3643 case 0xa9:
3644 if ((b & 1) == 0)
3645 ot = OT_BYTE;
3646 else
3647 ot = dflag + OT_WORD;
3648 val = insn_get(s, ot);
3649
3650 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3651 gen_op_movl_T1_im(val);
3652 gen_op_testl_T0_T1_cc();
3653 s->cc_op = CC_OP_LOGICB + ot;
3654 break;
3655
3656 case 0x98: /* CWDE/CBW */
3657#ifdef TARGET_X86_64
3658 if (dflag == 2) {
3659 gen_op_movslq_RAX_EAX();
3660 } else
3661#endif
3662 if (dflag == 1)
3663 gen_op_movswl_EAX_AX();
3664 else
3665 gen_op_movsbw_AX_AL();
3666 break;
3667 case 0x99: /* CDQ/CWD */
3668#ifdef TARGET_X86_64
3669 if (dflag == 2) {
3670 gen_op_movsqo_RDX_RAX();
3671 } else
3672#endif
3673 if (dflag == 1)
3674 gen_op_movslq_EDX_EAX();
3675 else
3676 gen_op_movswl_DX_AX();
3677 break;
3678 case 0x1af: /* imul Gv, Ev */
3679 case 0x69: /* imul Gv, Ev, I */
3680 case 0x6b:
3681 ot = dflag + OT_WORD;
3682 modrm = ldub_code(s->pc++);
3683 reg = ((modrm >> 3) & 7) | rex_r;
3684 if (b == 0x69)
3685 s->rip_offset = insn_const_size(ot);
3686 else if (b == 0x6b)
3687 s->rip_offset = 1;
3688 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3689 if (b == 0x69) {
3690 val = insn_get(s, ot);
3691 gen_op_movl_T1_im(val);
3692 } else if (b == 0x6b) {
3693 val = (int8_t)insn_get(s, OT_BYTE);
3694 gen_op_movl_T1_im(val);
3695 } else {
3696 gen_op_mov_TN_reg[ot][1][reg]();
3697 }
3698
3699#ifdef TARGET_X86_64
3700 if (ot == OT_QUAD) {
3701 gen_op_imulq_T0_T1();
3702 } else
3703#endif
3704 if (ot == OT_LONG) {
3705 gen_op_imull_T0_T1();
3706 } else {
3707 gen_op_imulw_T0_T1();
3708 }
3709 gen_op_mov_reg_T0[ot][reg]();
3710 s->cc_op = CC_OP_MULB + ot;
3711 break;
3712 case 0x1c0:
3713 case 0x1c1: /* xadd Ev, Gv */
3714 if ((b & 1) == 0)
3715 ot = OT_BYTE;
3716 else
3717 ot = dflag + OT_WORD;
3718 modrm = ldub_code(s->pc++);
3719 reg = ((modrm >> 3) & 7) | rex_r;
3720 mod = (modrm >> 6) & 3;
3721 if (mod == 3) {
3722 rm = (modrm & 7) | REX_B(s);
3723 gen_op_mov_TN_reg[ot][0][reg]();
3724 gen_op_mov_TN_reg[ot][1][rm]();
3725 gen_op_addl_T0_T1();
3726 gen_op_mov_reg_T1[ot][reg]();
3727 gen_op_mov_reg_T0[ot][rm]();
3728 } else {
3729 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3730 gen_op_mov_TN_reg[ot][0][reg]();
3731 gen_op_ld_T1_A0[ot + s->mem_index]();
3732 gen_op_addl_T0_T1();
3733 gen_op_st_T0_A0[ot + s->mem_index]();
3734 gen_op_mov_reg_T1[ot][reg]();
3735 }
3736 gen_op_update2_cc();
3737 s->cc_op = CC_OP_ADDB + ot;
3738 break;
3739 case 0x1b0:
3740 case 0x1b1: /* cmpxchg Ev, Gv */
3741 if ((b & 1) == 0)
3742 ot = OT_BYTE;
3743 else
3744 ot = dflag + OT_WORD;
3745 modrm = ldub_code(s->pc++);
3746 reg = ((modrm >> 3) & 7) | rex_r;
3747 mod = (modrm >> 6) & 3;
3748 gen_op_mov_TN_reg[ot][1][reg]();
3749 if (mod == 3) {
3750 rm = (modrm & 7) | REX_B(s);
3751 gen_op_mov_TN_reg[ot][0][rm]();
3752 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3753 gen_op_mov_reg_T0[ot][rm]();
3754 } else {
3755 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3756 gen_op_ld_T0_A0[ot + s->mem_index]();
3757 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3758 }
3759 s->cc_op = CC_OP_SUBB + ot;
3760 break;
3761 case 0x1c7: /* cmpxchg8b */
3762 modrm = ldub_code(s->pc++);
3763 mod = (modrm >> 6) & 3;
3764 if (mod == 3)
3765 goto illegal_op;
3766 if (s->cc_op != CC_OP_DYNAMIC)
3767 gen_op_set_cc_op(s->cc_op);
3768 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3769 gen_op_cmpxchg8b();
3770 s->cc_op = CC_OP_EFLAGS;
3771 break;
3772
3773 /**************************/
3774 /* push/pop */
3775 case 0x50 ... 0x57: /* push */
3776 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3777 gen_push_T0(s);
3778 break;
3779 case 0x58 ... 0x5f: /* pop */
3780 if (CODE64(s)) {
3781 ot = dflag ? OT_QUAD : OT_WORD;
3782 } else {
3783 ot = dflag + OT_WORD;
3784 }
3785 gen_pop_T0(s);
3786 /* NOTE: order is important for pop %sp */
3787 gen_pop_update(s);
3788 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3789 break;
3790 case 0x60: /* pusha */
3791 if (CODE64(s))
3792 goto illegal_op;
3793 gen_pusha(s);
3794 break;
3795 case 0x61: /* popa */
3796 if (CODE64(s))
3797 goto illegal_op;
3798 gen_popa(s);
3799 break;
3800 case 0x68: /* push Iv */
3801 case 0x6a:
3802 if (CODE64(s)) {
3803 ot = dflag ? OT_QUAD : OT_WORD;
3804 } else {
3805 ot = dflag + OT_WORD;
3806 }
3807 if (b == 0x68)
3808 val = insn_get(s, ot);
3809 else
3810 val = (int8_t)insn_get(s, OT_BYTE);
3811 gen_op_movl_T0_im(val);
3812 gen_push_T0(s);
3813 break;
3814 case 0x8f: /* pop Ev */
3815 if (CODE64(s)) {
3816 ot = dflag ? OT_QUAD : OT_WORD;
3817 } else {
3818 ot = dflag + OT_WORD;
3819 }
3820 modrm = ldub_code(s->pc++);
3821 mod = (modrm >> 6) & 3;
3822 gen_pop_T0(s);
3823 if (mod == 3) {
3824 /* NOTE: order is important for pop %sp */
3825 gen_pop_update(s);
3826 rm = (modrm & 7) | REX_B(s);
3827 gen_op_mov_reg_T0[ot][rm]();
3828 } else {
3829 /* NOTE: order is important too for MMU exceptions */
3830 s->popl_esp_hack = 1 << ot;
3831 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3832 s->popl_esp_hack = 0;
3833 gen_pop_update(s);
3834 }
3835 break;
3836 case 0xc8: /* enter */
3837 {
3838 int level;
3839 val = lduw_code(s->pc);
3840 s->pc += 2;
3841 level = ldub_code(s->pc++);
3842 gen_enter(s, val, level);
3843 }
3844 break;
3845 case 0xc9: /* leave */
3846 /* XXX: exception not precise (ESP is updated before potential exception) */
3847 if (CODE64(s)) {
3848 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3849 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3850 } else if (s->ss32) {
3851 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3852 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3853 } else {
3854 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3855 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3856 }
3857 gen_pop_T0(s);
3858 if (CODE64(s)) {
3859 ot = dflag ? OT_QUAD : OT_WORD;
3860 } else {
3861 ot = dflag + OT_WORD;
3862 }
3863 gen_op_mov_reg_T0[ot][R_EBP]();
3864 gen_pop_update(s);
3865 break;
3866 case 0x06: /* push es */
3867 case 0x0e: /* push cs */
3868 case 0x16: /* push ss */
3869 case 0x1e: /* push ds */
3870 if (CODE64(s))
3871 goto illegal_op;
3872 gen_op_movl_T0_seg(b >> 3);
3873 gen_push_T0(s);
3874 break;
3875 case 0x1a0: /* push fs */
3876 case 0x1a8: /* push gs */
3877 gen_op_movl_T0_seg((b >> 3) & 7);
3878 gen_push_T0(s);
3879 break;
3880 case 0x07: /* pop es */
3881 case 0x17: /* pop ss */
3882 case 0x1f: /* pop ds */
3883 if (CODE64(s))
3884 goto illegal_op;
3885 reg = b >> 3;
3886 gen_pop_T0(s);
3887 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3888 gen_pop_update(s);
3889 if (reg == R_SS) {
3890 /* if reg == SS, inhibit interrupts/trace. */
3891 /* If several instructions disable interrupts, only the
3892 _first_ does it */
3893 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3894 gen_op_set_inhibit_irq();
3895 s->tf = 0;
3896 }
3897 if (s->is_jmp) {
3898 gen_jmp_im(s->pc - s->cs_base);
3899 gen_eob(s);
3900 }
3901 break;
3902 case 0x1a1: /* pop fs */
3903 case 0x1a9: /* pop gs */
3904 gen_pop_T0(s);
3905 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3906 gen_pop_update(s);
3907 if (s->is_jmp) {
3908 gen_jmp_im(s->pc - s->cs_base);
3909 gen_eob(s);
3910 }
3911 break;
3912
3913 /**************************/
3914 /* mov */
3915 case 0x88:
3916 case 0x89: /* mov Gv, Ev */
3917 if ((b & 1) == 0)
3918 ot = OT_BYTE;
3919 else
3920 ot = dflag + OT_WORD;
3921 modrm = ldub_code(s->pc++);
3922 reg = ((modrm >> 3) & 7) | rex_r;
3923
3924 /* generate a generic store */
3925 gen_ldst_modrm(s, modrm, ot, reg, 1);
3926 break;
3927 case 0xc6:
3928 case 0xc7: /* mov Ev, Iv */
3929 if ((b & 1) == 0)
3930 ot = OT_BYTE;
3931 else
3932 ot = dflag + OT_WORD;
3933 modrm = ldub_code(s->pc++);
3934 mod = (modrm >> 6) & 3;
3935 if (mod != 3) {
3936 s->rip_offset = insn_const_size(ot);
3937 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3938 }
3939 val = insn_get(s, ot);
3940 gen_op_movl_T0_im(val);
3941 if (mod != 3)
3942 gen_op_st_T0_A0[ot + s->mem_index]();
3943 else
3944 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3945 break;
3946 case 0x8a:
3947 case 0x8b: /* mov Ev, Gv */
3948 if ((b & 1) == 0)
3949 ot = OT_BYTE;
3950 else
3951 ot = OT_WORD + dflag;
3952 modrm = ldub_code(s->pc++);
3953 reg = ((modrm >> 3) & 7) | rex_r;
3954
3955 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3956 gen_op_mov_reg_T0[ot][reg]();
3957 break;
3958 case 0x8e: /* mov seg, Gv */
3959 modrm = ldub_code(s->pc++);
3960 reg = (modrm >> 3) & 7;
3961 if (reg >= 6 || reg == R_CS)
3962 goto illegal_op;
3963 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3964 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3965 if (reg == R_SS) {
3966 /* if reg == SS, inhibit interrupts/trace */
3967 /* If several instructions disable interrupts, only the
3968 _first_ does it */
3969 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3970 gen_op_set_inhibit_irq();
3971 s->tf = 0;
3972 }
3973 if (s->is_jmp) {
3974 gen_jmp_im(s->pc - s->cs_base);
3975 gen_eob(s);
3976 }
3977 break;
3978 case 0x8c: /* mov Gv, seg */
3979 modrm = ldub_code(s->pc++);
3980 reg = (modrm >> 3) & 7;
3981 mod = (modrm >> 6) & 3;
3982 if (reg >= 6)
3983 goto illegal_op;
3984 gen_op_movl_T0_seg(reg);
3985 if (mod == 3)
3986 ot = OT_WORD + dflag;
3987 else
3988 ot = OT_WORD;
3989 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3990 break;
3991
3992 case 0x1b6: /* movzbS Gv, Eb */
3993 case 0x1b7: /* movzwS Gv, Eb */
3994 case 0x1be: /* movsbS Gv, Eb */
3995 case 0x1bf: /* movswS Gv, Eb */
3996 {
3997 int d_ot;
3998 /* d_ot is the size of destination */
3999 d_ot = dflag + OT_WORD;
4000 /* ot is the size of source */
4001 ot = (b & 1) + OT_BYTE;
4002 modrm = ldub_code(s->pc++);
4003 reg = ((modrm >> 3) & 7) | rex_r;
4004 mod = (modrm >> 6) & 3;
4005 rm = (modrm & 7) | REX_B(s);
4006
4007 if (mod == 3) {
4008 gen_op_mov_TN_reg[ot][0][rm]();
4009 switch(ot | (b & 8)) {
4010 case OT_BYTE:
4011 gen_op_movzbl_T0_T0();
4012 break;
4013 case OT_BYTE | 8:
4014 gen_op_movsbl_T0_T0();
4015 break;
4016 case OT_WORD:
4017 gen_op_movzwl_T0_T0();
4018 break;
4019 default:
4020 case OT_WORD | 8:
4021 gen_op_movswl_T0_T0();
4022 break;
4023 }
4024 gen_op_mov_reg_T0[d_ot][reg]();
4025 } else {
4026 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4027 if (b & 8) {
4028 gen_op_lds_T0_A0[ot + s->mem_index]();
4029 } else {
4030 gen_op_ldu_T0_A0[ot + s->mem_index]();
4031 }
4032 gen_op_mov_reg_T0[d_ot][reg]();
4033 }
4034 }
4035 break;
4036
4037 case 0x8d: /* lea */
4038 ot = dflag + OT_WORD;
4039 modrm = ldub_code(s->pc++);
4040 mod = (modrm >> 6) & 3;
4041 if (mod == 3)
4042 goto illegal_op;
4043 reg = ((modrm >> 3) & 7) | rex_r;
4044 /* we must ensure that no segment is added */
4045 s->override = -1;
4046 val = s->addseg;
4047 s->addseg = 0;
4048 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4049 s->addseg = val;
4050 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4051 break;
4052
4053 case 0xa0: /* mov EAX, Ov */
4054 case 0xa1:
4055 case 0xa2: /* mov Ov, EAX */
4056 case 0xa3:
4057 {
4058 target_ulong offset_addr;
4059
4060 if ((b & 1) == 0)
4061 ot = OT_BYTE;
4062 else
4063 ot = dflag + OT_WORD;
4064#ifdef TARGET_X86_64
4065 if (s->aflag == 2) {
4066 offset_addr = ldq_code(s->pc);
4067 s->pc += 8;
4068 if (offset_addr == (int32_t)offset_addr)
4069 gen_op_movq_A0_im(offset_addr);
4070 else
4071 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4072 } else
4073#endif
4074 {
4075 if (s->aflag) {
4076 offset_addr = insn_get(s, OT_LONG);
4077 } else {
4078 offset_addr = insn_get(s, OT_WORD);
4079 }
4080 gen_op_movl_A0_im(offset_addr);
4081 }
4082 gen_add_A0_ds_seg(s);
4083 if ((b & 2) == 0) {
4084 gen_op_ld_T0_A0[ot + s->mem_index]();
4085 gen_op_mov_reg_T0[ot][R_EAX]();
4086 } else {
4087 gen_op_mov_TN_reg[ot][0][R_EAX]();
4088 gen_op_st_T0_A0[ot + s->mem_index]();
4089 }
4090 }
4091 break;
4092 case 0xd7: /* xlat */
4093#ifdef TARGET_X86_64
4094 if (s->aflag == 2) {
4095 gen_op_movq_A0_reg[R_EBX]();
4096 gen_op_addq_A0_AL();
4097 } else
4098#endif
4099 {
4100 gen_op_movl_A0_reg[R_EBX]();
4101 gen_op_addl_A0_AL();
4102 if (s->aflag == 0)
4103 gen_op_andl_A0_ffff();
4104 }
4105 gen_add_A0_ds_seg(s);
4106 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4107 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4108 break;
4109 case 0xb0 ... 0xb7: /* mov R, Ib */
4110 val = insn_get(s, OT_BYTE);
4111 gen_op_movl_T0_im(val);
4112 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4113 break;
4114 case 0xb8 ... 0xbf: /* mov R, Iv */
4115#ifdef TARGET_X86_64
4116 if (dflag == 2) {
4117 uint64_t tmp;
4118 /* 64 bit case */
4119 tmp = ldq_code(s->pc);
4120 s->pc += 8;
4121 reg = (b & 7) | REX_B(s);
4122 gen_movtl_T0_im(tmp);
4123 gen_op_mov_reg_T0[OT_QUAD][reg]();
4124 } else
4125#endif
4126 {
4127 ot = dflag ? OT_LONG : OT_WORD;
4128 val = insn_get(s, ot);
4129 reg = (b & 7) | REX_B(s);
4130 gen_op_movl_T0_im(val);
4131 gen_op_mov_reg_T0[ot][reg]();
4132 }
4133 break;
4134
4135 case 0x91 ... 0x97: /* xchg R, EAX */
4136 ot = dflag + OT_WORD;
4137 reg = (b & 7) | REX_B(s);
4138 rm = R_EAX;
4139 goto do_xchg_reg;
4140 case 0x86:
4141 case 0x87: /* xchg Ev, Gv */
4142 if ((b & 1) == 0)
4143 ot = OT_BYTE;
4144 else
4145 ot = dflag + OT_WORD;
4146 modrm = ldub_code(s->pc++);
4147 reg = ((modrm >> 3) & 7) | rex_r;
4148 mod = (modrm >> 6) & 3;
4149 if (mod == 3) {
4150 rm = (modrm & 7) | REX_B(s);
4151 do_xchg_reg:
4152 gen_op_mov_TN_reg[ot][0][reg]();
4153 gen_op_mov_TN_reg[ot][1][rm]();
4154 gen_op_mov_reg_T0[ot][rm]();
4155 gen_op_mov_reg_T1[ot][reg]();
4156 } else {
4157 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4158 gen_op_mov_TN_reg[ot][0][reg]();
4159 /* for xchg, lock is implicit */
4160 if (!(prefixes & PREFIX_LOCK))
4161 gen_op_lock();
4162 gen_op_ld_T1_A0[ot + s->mem_index]();
4163 gen_op_st_T0_A0[ot + s->mem_index]();
4164 if (!(prefixes & PREFIX_LOCK))
4165 gen_op_unlock();
4166 gen_op_mov_reg_T1[ot][reg]();
4167 }
4168 break;
4169 case 0xc4: /* les Gv */
4170 if (CODE64(s))
4171 goto illegal_op;
4172 op = R_ES;
4173 goto do_lxx;
4174 case 0xc5: /* lds Gv */
4175 if (CODE64(s))
4176 goto illegal_op;
4177 op = R_DS;
4178 goto do_lxx;
4179 case 0x1b2: /* lss Gv */
4180 op = R_SS;
4181 goto do_lxx;
4182 case 0x1b4: /* lfs Gv */
4183 op = R_FS;
4184 goto do_lxx;
4185 case 0x1b5: /* lgs Gv */
4186 op = R_GS;
4187 do_lxx:
4188 ot = dflag ? OT_LONG : OT_WORD;
4189 modrm = ldub_code(s->pc++);
4190 reg = ((modrm >> 3) & 7) | rex_r;
4191 mod = (modrm >> 6) & 3;
4192 if (mod == 3)
4193 goto illegal_op;
4194 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4195 gen_op_ld_T1_A0[ot + s->mem_index]();
4196 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4197 /* load the segment first to handle exceptions properly */
4198 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4199 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4200 /* then put the data */
4201 gen_op_mov_reg_T1[ot][reg]();
4202 if (s->is_jmp) {
4203 gen_jmp_im(s->pc - s->cs_base);
4204 gen_eob(s);
4205 }
4206 break;
4207
4208 /************************/
4209 /* shifts */
4210 case 0xc0:
4211 case 0xc1:
4212 /* shift Ev,Ib */
4213 shift = 2;
4214 grp2:
4215 {
4216 if ((b & 1) == 0)
4217 ot = OT_BYTE;
4218 else
4219 ot = dflag + OT_WORD;
4220
4221 modrm = ldub_code(s->pc++);
4222 mod = (modrm >> 6) & 3;
4223 op = (modrm >> 3) & 7;
4224
4225 if (mod != 3) {
4226 if (shift == 2) {
4227 s->rip_offset = 1;
4228 }
4229 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4230 opreg = OR_TMP0;
4231 } else {
4232 opreg = (modrm & 7) | REX_B(s);
4233 }
4234
4235 /* simpler op */
4236 if (shift == 0) {
4237 gen_shift(s, op, ot, opreg, OR_ECX);
4238 } else {
4239 if (shift == 2) {
4240 shift = ldub_code(s->pc++);
4241 }
4242 gen_shifti(s, op, ot, opreg, shift);
4243 }
4244 }
4245 break;
4246 case 0xd0:
4247 case 0xd1:
4248 /* shift Ev,1 */
4249 shift = 1;
4250 goto grp2;
4251 case 0xd2:
4252 case 0xd3:
4253 /* shift Ev,cl */
4254 shift = 0;
4255 goto grp2;
4256
4257 case 0x1a4: /* shld imm */
4258 op = 0;
4259 shift = 1;
4260 goto do_shiftd;
4261 case 0x1a5: /* shld cl */
4262 op = 0;
4263 shift = 0;
4264 goto do_shiftd;
4265 case 0x1ac: /* shrd imm */
4266 op = 1;
4267 shift = 1;
4268 goto do_shiftd;
4269 case 0x1ad: /* shrd cl */
4270 op = 1;
4271 shift = 0;
4272 do_shiftd:
4273 ot = dflag + OT_WORD;
4274 modrm = ldub_code(s->pc++);
4275 mod = (modrm >> 6) & 3;
4276 rm = (modrm & 7) | REX_B(s);
4277 reg = ((modrm >> 3) & 7) | rex_r;
4278
4279 if (mod != 3) {
4280 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4281 gen_op_ld_T0_A0[ot + s->mem_index]();
4282 } else {
4283 gen_op_mov_TN_reg[ot][0][rm]();
4284 }
4285 gen_op_mov_TN_reg[ot][1][reg]();
4286
4287 if (shift) {
4288 val = ldub_code(s->pc++);
4289 if (ot == OT_QUAD)
4290 val &= 0x3f;
4291 else
4292 val &= 0x1f;
4293 if (val) {
4294 if (mod == 3)
4295 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4296 else
4297 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4298 if (op == 0 && ot != OT_WORD)
4299 s->cc_op = CC_OP_SHLB + ot;
4300 else
4301 s->cc_op = CC_OP_SARB + ot;
4302 }
4303 } else {
4304 if (s->cc_op != CC_OP_DYNAMIC)
4305 gen_op_set_cc_op(s->cc_op);
4306 if (mod == 3)
4307 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4308 else
4309 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4310 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4311 }
4312 if (mod == 3) {
4313 gen_op_mov_reg_T0[ot][rm]();
4314 }
4315 break;
4316
4317 /************************/
4318 /* floats */
4319 case 0xd8 ... 0xdf:
4320 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4321 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4322 /* XXX: what to do if illegal op ? */
4323 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4324 break;
4325 }
4326 modrm = ldub_code(s->pc++);
4327 mod = (modrm >> 6) & 3;
4328 rm = modrm & 7;
4329 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4330 if (mod != 3) {
4331 /* memory op */
4332 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4333 switch(op) {
4334 case 0x00 ... 0x07: /* fxxxs */
4335 case 0x10 ... 0x17: /* fixxxl */
4336 case 0x20 ... 0x27: /* fxxxl */
4337 case 0x30 ... 0x37: /* fixxx */
4338 {
4339 int op1;
4340 op1 = op & 7;
4341
4342 switch(op >> 4) {
4343 case 0:
4344 gen_op_flds_FT0_A0();
4345 break;
4346 case 1:
4347 gen_op_fildl_FT0_A0();
4348 break;
4349 case 2:
4350 gen_op_fldl_FT0_A0();
4351 break;
4352 case 3:
4353 default:
4354 gen_op_fild_FT0_A0();
4355 break;
4356 }
4357
4358 gen_op_fp_arith_ST0_FT0[op1]();
4359 if (op1 == 3) {
4360 /* fcomp needs pop */
4361 gen_op_fpop();
4362 }
4363 }
4364 break;
4365 case 0x08: /* flds */
4366 case 0x0a: /* fsts */
4367 case 0x0b: /* fstps */
4368 case 0x18: /* fildl */
4369 case 0x1a: /* fistl */
4370 case 0x1b: /* fistpl */
4371 case 0x28: /* fldl */
4372 case 0x2a: /* fstl */
4373 case 0x2b: /* fstpl */
4374 case 0x38: /* filds */
4375 case 0x3a: /* fists */
4376 case 0x3b: /* fistps */
4377
4378 switch(op & 7) {
4379 case 0:
4380 switch(op >> 4) {
4381 case 0:
4382 gen_op_flds_ST0_A0();
4383 break;
4384 case 1:
4385 gen_op_fildl_ST0_A0();
4386 break;
4387 case 2:
4388 gen_op_fldl_ST0_A0();
4389 break;
4390 case 3:
4391 default:
4392 gen_op_fild_ST0_A0();
4393 break;
4394 }
4395 break;
4396 default:
4397 switch(op >> 4) {
4398 case 0:
4399 gen_op_fsts_ST0_A0();
4400 break;
4401 case 1:
4402 gen_op_fistl_ST0_A0();
4403 break;
4404 case 2:
4405 gen_op_fstl_ST0_A0();
4406 break;
4407 case 3:
4408 default:
4409 gen_op_fist_ST0_A0();
4410 break;
4411 }
4412 if ((op & 7) == 3)
4413 gen_op_fpop();
4414 break;
4415 }
4416 break;
4417 case 0x0c: /* fldenv mem */
4418 gen_op_fldenv_A0(s->dflag);
4419 break;
4420 case 0x0d: /* fldcw mem */
4421 gen_op_fldcw_A0();
4422 break;
4423 case 0x0e: /* fnstenv mem */
4424 gen_op_fnstenv_A0(s->dflag);
4425 break;
4426 case 0x0f: /* fnstcw mem */
4427 gen_op_fnstcw_A0();
4428 break;
4429 case 0x1d: /* fldt mem */
4430 gen_op_fldt_ST0_A0();
4431 break;
4432 case 0x1f: /* fstpt mem */
4433 gen_op_fstt_ST0_A0();
4434 gen_op_fpop();
4435 break;
4436 case 0x2c: /* frstor mem */
4437 gen_op_frstor_A0(s->dflag);
4438 break;
4439 case 0x2e: /* fnsave mem */
4440 gen_op_fnsave_A0(s->dflag);
4441 break;
4442 case 0x2f: /* fnstsw mem */
4443 gen_op_fnstsw_A0();
4444 break;
4445 case 0x3c: /* fbld */
4446 gen_op_fbld_ST0_A0();
4447 break;
4448 case 0x3e: /* fbstp */
4449 gen_op_fbst_ST0_A0();
4450 gen_op_fpop();
4451 break;
4452 case 0x3d: /* fildll */
4453 gen_op_fildll_ST0_A0();
4454 break;
4455 case 0x3f: /* fistpll */
4456 gen_op_fistll_ST0_A0();
4457 gen_op_fpop();
4458 break;
4459 default:
4460 goto illegal_op;
4461 }
4462 } else {
4463 /* register float ops */
4464 opreg = rm;
4465
4466 switch(op) {
4467 case 0x08: /* fld sti */
4468 gen_op_fpush();
4469 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4470 break;
4471 case 0x09: /* fxchg sti */
4472 case 0x29: /* fxchg4 sti, undocumented op */
4473 case 0x39: /* fxchg7 sti, undocumented op */
4474 gen_op_fxchg_ST0_STN(opreg);
4475 break;
4476 case 0x0a: /* grp d9/2 */
4477 switch(rm) {
4478 case 0: /* fnop */
4479 /* check exceptions (FreeBSD FPU probe) */
4480 if (s->cc_op != CC_OP_DYNAMIC)
4481 gen_op_set_cc_op(s->cc_op);
4482 gen_jmp_im(pc_start - s->cs_base);
4483 gen_op_fwait();
4484 break;
4485 default:
4486 goto illegal_op;
4487 }
4488 break;
4489 case 0x0c: /* grp d9/4 */
4490 switch(rm) {
4491 case 0: /* fchs */
4492 gen_op_fchs_ST0();
4493 break;
4494 case 1: /* fabs */
4495 gen_op_fabs_ST0();
4496 break;
4497 case 4: /* ftst */
4498 gen_op_fldz_FT0();
4499 gen_op_fcom_ST0_FT0();
4500 break;
4501 case 5: /* fxam */
4502 gen_op_fxam_ST0();
4503 break;
4504 default:
4505 goto illegal_op;
4506 }
4507 break;
4508 case 0x0d: /* grp d9/5 */
4509 {
4510 switch(rm) {
4511 case 0:
4512 gen_op_fpush();
4513 gen_op_fld1_ST0();
4514 break;
4515 case 1:
4516 gen_op_fpush();
4517 gen_op_fldl2t_ST0();
4518 break;
4519 case 2:
4520 gen_op_fpush();
4521 gen_op_fldl2e_ST0();
4522 break;
4523 case 3:
4524 gen_op_fpush();
4525 gen_op_fldpi_ST0();
4526 break;
4527 case 4:
4528 gen_op_fpush();
4529 gen_op_fldlg2_ST0();
4530 break;
4531 case 5:
4532 gen_op_fpush();
4533 gen_op_fldln2_ST0();
4534 break;
4535 case 6:
4536 gen_op_fpush();
4537 gen_op_fldz_ST0();
4538 break;
4539 default:
4540 goto illegal_op;
4541 }
4542 }
4543 break;
4544 case 0x0e: /* grp d9/6 */
4545 switch(rm) {
4546 case 0: /* f2xm1 */
4547 gen_op_f2xm1();
4548 break;
4549 case 1: /* fyl2x */
4550 gen_op_fyl2x();
4551 break;
4552 case 2: /* fptan */
4553 gen_op_fptan();
4554 break;
4555 case 3: /* fpatan */
4556 gen_op_fpatan();
4557 break;
4558 case 4: /* fxtract */
4559 gen_op_fxtract();
4560 break;
4561 case 5: /* fprem1 */
4562 gen_op_fprem1();
4563 break;
4564 case 6: /* fdecstp */
4565 gen_op_fdecstp();
4566 break;
4567 default:
4568 case 7: /* fincstp */
4569 gen_op_fincstp();
4570 break;
4571 }
4572 break;
4573 case 0x0f: /* grp d9/7 */
4574 switch(rm) {
4575 case 0: /* fprem */
4576 gen_op_fprem();
4577 break;
4578 case 1: /* fyl2xp1 */
4579 gen_op_fyl2xp1();
4580 break;
4581 case 2: /* fsqrt */
4582 gen_op_fsqrt();
4583 break;
4584 case 3: /* fsincos */
4585 gen_op_fsincos();
4586 break;
4587 case 5: /* fscale */
4588 gen_op_fscale();
4589 break;
4590 case 4: /* frndint */
4591 gen_op_frndint();
4592 break;
4593 case 6: /* fsin */
4594 gen_op_fsin();
4595 break;
4596 default:
4597 case 7: /* fcos */
4598 gen_op_fcos();
4599 break;
4600 }
4601 break;
4602 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4603 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4604 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4605 {
4606 int op1;
4607
4608 op1 = op & 7;
4609 if (op >= 0x20) {
4610 gen_op_fp_arith_STN_ST0[op1](opreg);
4611 if (op >= 0x30)
4612 gen_op_fpop();
4613 } else {
4614 gen_op_fmov_FT0_STN(opreg);
4615 gen_op_fp_arith_ST0_FT0[op1]();
4616 }
4617 }
4618 break;
4619 case 0x02: /* fcom */
4620 case 0x22: /* fcom2, undocumented op */
4621 gen_op_fmov_FT0_STN(opreg);
4622 gen_op_fcom_ST0_FT0();
4623 break;
4624 case 0x03: /* fcomp */
4625 case 0x23: /* fcomp3, undocumented op */
4626 case 0x32: /* fcomp5, undocumented op */
4627 gen_op_fmov_FT0_STN(opreg);
4628 gen_op_fcom_ST0_FT0();
4629 gen_op_fpop();
4630 break;
4631 case 0x15: /* da/5 */
4632 switch(rm) {
4633 case 1: /* fucompp */
4634 gen_op_fmov_FT0_STN(1);
4635 gen_op_fucom_ST0_FT0();
4636 gen_op_fpop();
4637 gen_op_fpop();
4638 break;
4639 default:
4640 goto illegal_op;
4641 }
4642 break;
4643 case 0x1c:
4644 switch(rm) {
4645 case 0: /* feni (287 only, just do nop here) */
4646 break;
4647 case 1: /* fdisi (287 only, just do nop here) */
4648 break;
4649 case 2: /* fclex */
4650 gen_op_fclex();
4651 break;
4652 case 3: /* fninit */
4653 gen_op_fninit();
4654 break;
4655 case 4: /* fsetpm (287 only, just do nop here) */
4656 break;
4657 default:
4658 goto illegal_op;
4659 }
4660 break;
4661 case 0x1d: /* fucomi */
4662 if (s->cc_op != CC_OP_DYNAMIC)
4663 gen_op_set_cc_op(s->cc_op);
4664 gen_op_fmov_FT0_STN(opreg);
4665 gen_op_fucomi_ST0_FT0();
4666 s->cc_op = CC_OP_EFLAGS;
4667 break;
4668 case 0x1e: /* fcomi */
4669 if (s->cc_op != CC_OP_DYNAMIC)
4670 gen_op_set_cc_op(s->cc_op);
4671 gen_op_fmov_FT0_STN(opreg);
4672 gen_op_fcomi_ST0_FT0();
4673 s->cc_op = CC_OP_EFLAGS;
4674 break;
4675 case 0x28: /* ffree sti */
4676 gen_op_ffree_STN(opreg);
4677 break;
4678 case 0x2a: /* fst sti */
4679 gen_op_fmov_STN_ST0(opreg);
4680 break;
4681 case 0x2b: /* fstp sti */
4682 case 0x0b: /* fstp1 sti, undocumented op */
4683 case 0x3a: /* fstp8 sti, undocumented op */
4684 case 0x3b: /* fstp9 sti, undocumented op */
4685 gen_op_fmov_STN_ST0(opreg);
4686 gen_op_fpop();
4687 break;
4688 case 0x2c: /* fucom st(i) */
4689 gen_op_fmov_FT0_STN(opreg);
4690 gen_op_fucom_ST0_FT0();
4691 break;
4692 case 0x2d: /* fucomp st(i) */
4693 gen_op_fmov_FT0_STN(opreg);
4694 gen_op_fucom_ST0_FT0();
4695 gen_op_fpop();
4696 break;
4697 case 0x33: /* de/3 */
4698 switch(rm) {
4699 case 1: /* fcompp */
4700 gen_op_fmov_FT0_STN(1);
4701 gen_op_fcom_ST0_FT0();
4702 gen_op_fpop();
4703 gen_op_fpop();
4704 break;
4705 default:
4706 goto illegal_op;
4707 }
4708 break;
4709 case 0x38: /* ffreep sti, undocumented op */
4710 gen_op_ffree_STN(opreg);
4711 gen_op_fpop();
4712 break;
4713 case 0x3c: /* df/4 */
4714 switch(rm) {
4715 case 0:
4716 gen_op_fnstsw_EAX();
4717 break;
4718 default:
4719 goto illegal_op;
4720 }
4721 break;
4722 case 0x3d: /* fucomip */
4723 if (s->cc_op != CC_OP_DYNAMIC)
4724 gen_op_set_cc_op(s->cc_op);
4725 gen_op_fmov_FT0_STN(opreg);
4726 gen_op_fucomi_ST0_FT0();
4727 gen_op_fpop();
4728 s->cc_op = CC_OP_EFLAGS;
4729 break;
4730 case 0x3e: /* fcomip */
4731 if (s->cc_op != CC_OP_DYNAMIC)
4732 gen_op_set_cc_op(s->cc_op);
4733 gen_op_fmov_FT0_STN(opreg);
4734 gen_op_fcomi_ST0_FT0();
4735 gen_op_fpop();
4736 s->cc_op = CC_OP_EFLAGS;
4737 break;
4738 case 0x10 ... 0x13: /* fcmovxx */
4739 case 0x18 ... 0x1b:
4740 {
4741 int op1;
4742 const static uint8_t fcmov_cc[8] = {
4743 (JCC_B << 1),
4744 (JCC_Z << 1),
4745 (JCC_BE << 1),
4746 (JCC_P << 1),
4747 };
4748 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4749 gen_setcc(s, op1);
4750 gen_op_fcmov_ST0_STN_T0(opreg);
4751 }
4752 break;
4753 default:
4754 goto illegal_op;
4755 }
4756 }
4757#ifdef USE_CODE_COPY
4758 s->tb->cflags |= CF_TB_FP_USED;
4759#endif
4760 break;
4761 /************************/
4762 /* string ops */
4763
4764 case 0xa4: /* movsS */
4765 case 0xa5:
4766 if ((b & 1) == 0)
4767 ot = OT_BYTE;
4768 else
4769 ot = dflag + OT_WORD;
4770
4771 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4772 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4773 } else {
4774 gen_movs(s, ot);
4775 }
4776 break;
4777
4778 case 0xaa: /* stosS */
4779 case 0xab:
4780 if ((b & 1) == 0)
4781 ot = OT_BYTE;
4782 else
4783 ot = dflag + OT_WORD;
4784
4785 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4786 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4787 } else {
4788 gen_stos(s, ot);
4789 }
4790 break;
4791 case 0xac: /* lodsS */
4792 case 0xad:
4793 if ((b & 1) == 0)
4794 ot = OT_BYTE;
4795 else
4796 ot = dflag + OT_WORD;
4797 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4798 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4799 } else {
4800 gen_lods(s, ot);
4801 }
4802 break;
4803 case 0xae: /* scasS */
4804 case 0xaf:
4805 if ((b & 1) == 0)
4806 ot = OT_BYTE;
4807 else
4808 ot = dflag + OT_WORD;
4809 if (prefixes & PREFIX_REPNZ) {
4810 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4811 } else if (prefixes & PREFIX_REPZ) {
4812 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4813 } else {
4814 gen_scas(s, ot);
4815 s->cc_op = CC_OP_SUBB + ot;
4816 }
4817 break;
4818
4819 case 0xa6: /* cmpsS */
4820 case 0xa7:
4821 if ((b & 1) == 0)
4822 ot = OT_BYTE;
4823 else
4824 ot = dflag + OT_WORD;
4825 if (prefixes & PREFIX_REPNZ) {
4826 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4827 } else if (prefixes & PREFIX_REPZ) {
4828 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4829 } else {
4830 gen_cmps(s, ot);
4831 s->cc_op = CC_OP_SUBB + ot;
4832 }
4833 break;
4834 case 0x6c: /* insS */
4835 case 0x6d:
4836 if ((b & 1) == 0)
4837 ot = OT_BYTE;
4838 else
4839 ot = dflag ? OT_LONG : OT_WORD;
4840 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4841 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4842 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4843 } else {
4844 gen_ins(s, ot);
4845 }
4846 break;
4847 case 0x6e: /* outsS */
4848 case 0x6f:
4849 if ((b & 1) == 0)
4850 ot = OT_BYTE;
4851 else
4852 ot = dflag ? OT_LONG : OT_WORD;
4853 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4854 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4855 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4856 } else {
4857 gen_outs(s, ot);
4858 }
4859 break;
4860
4861 /************************/
4862 /* port I/O */
4863 case 0xe4:
4864 case 0xe5:
4865 if ((b & 1) == 0)
4866 ot = OT_BYTE;
4867 else
4868 ot = dflag ? OT_LONG : OT_WORD;
4869 val = ldub_code(s->pc++);
4870 gen_op_movl_T0_im(val);
4871 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4872 gen_op_in[ot]();
4873 gen_op_mov_reg_T1[ot][R_EAX]();
4874 break;
4875 case 0xe6:
4876 case 0xe7:
4877 if ((b & 1) == 0)
4878 ot = OT_BYTE;
4879 else
4880 ot = dflag ? OT_LONG : OT_WORD;
4881 val = ldub_code(s->pc++);
4882 gen_op_movl_T0_im(val);
4883 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4884#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
4885 if (val == 0x80)
4886 break;
4887#endif /* VBOX */
4888 gen_op_mov_TN_reg[ot][1][R_EAX]();
4889 gen_op_out[ot]();
4890 break;
4891 case 0xec:
4892 case 0xed:
4893 if ((b & 1) == 0)
4894 ot = OT_BYTE;
4895 else
4896 ot = dflag ? OT_LONG : OT_WORD;
4897 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4898 gen_op_andl_T0_ffff();
4899 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4900 gen_op_in[ot]();
4901 gen_op_mov_reg_T1[ot][R_EAX]();
4902 break;
4903 case 0xee:
4904 case 0xef:
4905 if ((b & 1) == 0)
4906 ot = OT_BYTE;
4907 else
4908 ot = dflag ? OT_LONG : OT_WORD;
4909 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4910 gen_op_andl_T0_ffff();
4911 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4912 gen_op_mov_TN_reg[ot][1][R_EAX]();
4913 gen_op_out[ot]();
4914 break;
4915
4916 /************************/
4917 /* control */
4918 case 0xc2: /* ret im */
4919 val = ldsw_code(s->pc);
4920 s->pc += 2;
4921 gen_pop_T0(s);
4922 if (CODE64(s) && s->dflag)
4923 s->dflag = 2;
4924 gen_stack_update(s, val + (2 << s->dflag));
4925 if (s->dflag == 0)
4926 gen_op_andl_T0_ffff();
4927 gen_op_jmp_T0();
4928 gen_eob(s);
4929 break;
4930 case 0xc3: /* ret */
4931 gen_pop_T0(s);
4932 gen_pop_update(s);
4933 if (s->dflag == 0)
4934 gen_op_andl_T0_ffff();
4935 gen_op_jmp_T0();
4936 gen_eob(s);
4937 break;
4938 case 0xca: /* lret im */
4939 val = ldsw_code(s->pc);
4940 s->pc += 2;
4941 do_lret:
4942 if (s->pe && !s->vm86) {
4943 if (s->cc_op != CC_OP_DYNAMIC)
4944 gen_op_set_cc_op(s->cc_op);
4945 gen_jmp_im(pc_start - s->cs_base);
4946 gen_op_lret_protected(s->dflag, val);
4947 } else {
4948 gen_stack_A0(s);
4949 /* pop offset */
4950 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4951 if (s->dflag == 0)
4952 gen_op_andl_T0_ffff();
4953 /* NOTE: keeping EIP updated is not a problem in case of
4954 exception */
4955 gen_op_jmp_T0();
4956 /* pop selector */
4957 gen_op_addl_A0_im(2 << s->dflag);
4958 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4959 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4960 /* add stack offset */
4961 gen_stack_update(s, val + (4 << s->dflag));
4962 }
4963 gen_eob(s);
4964 break;
4965 case 0xcb: /* lret */
4966 val = 0;
4967 goto do_lret;
4968 case 0xcf: /* iret */
4969 if (!s->pe) {
4970 /* real mode */
4971 gen_op_iret_real(s->dflag);
4972 s->cc_op = CC_OP_EFLAGS;
4973 } else if (s->vm86) {
4974#ifdef VBOX
4975 if (s->iopl != 3 && (!s->vme || s->dflag)) {
4976#else
4977 if (s->iopl != 3) {
4978#endif
4979 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4980 } else {
4981 gen_op_iret_real(s->dflag);
4982 s->cc_op = CC_OP_EFLAGS;
4983 }
4984 } else {
4985 if (s->cc_op != CC_OP_DYNAMIC)
4986 gen_op_set_cc_op(s->cc_op);
4987 gen_jmp_im(pc_start - s->cs_base);
4988 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4989 s->cc_op = CC_OP_EFLAGS;
4990 }
4991 gen_eob(s);
4992 break;
4993 case 0xe8: /* call im */
4994 {
4995 if (dflag)
4996 tval = (int32_t)insn_get(s, OT_LONG);
4997 else
4998 tval = (int16_t)insn_get(s, OT_WORD);
4999 next_eip = s->pc - s->cs_base;
5000 tval += next_eip;
5001 if (s->dflag == 0)
5002 tval &= 0xffff;
5003 gen_movtl_T0_im(next_eip);
5004 gen_push_T0(s);
5005 gen_jmp(s, tval);
5006 }
5007 break;
5008 case 0x9a: /* lcall im */
5009 {
5010 unsigned int selector, offset;
5011
5012 if (CODE64(s))
5013 goto illegal_op;
5014 ot = dflag ? OT_LONG : OT_WORD;
5015 offset = insn_get(s, ot);
5016 selector = insn_get(s, OT_WORD);
5017
5018 gen_op_movl_T0_im(selector);
5019 gen_op_movl_T1_imu(offset);
5020 }
5021 goto do_lcall;
5022 case 0xe9: /* jmp im */
5023 if (dflag)
5024 tval = (int32_t)insn_get(s, OT_LONG);
5025 else
5026 tval = (int16_t)insn_get(s, OT_WORD);
5027 tval += s->pc - s->cs_base;
5028 if (s->dflag == 0)
5029 tval &= 0xffff;
5030 gen_jmp(s, tval);
5031 break;
5032 case 0xea: /* ljmp im */
5033 {
5034 unsigned int selector, offset;
5035
5036 if (CODE64(s))
5037 goto illegal_op;
5038 ot = dflag ? OT_LONG : OT_WORD;
5039 offset = insn_get(s, ot);
5040 selector = insn_get(s, OT_WORD);
5041
5042 gen_op_movl_T0_im(selector);
5043 gen_op_movl_T1_imu(offset);
5044 }
5045 goto do_ljmp;
5046 case 0xeb: /* jmp Jb */
5047 tval = (int8_t)insn_get(s, OT_BYTE);
5048 tval += s->pc - s->cs_base;
5049 if (s->dflag == 0)
5050 tval &= 0xffff;
5051 gen_jmp(s, tval);
5052 break;
5053 case 0x70 ... 0x7f: /* jcc Jb */
5054 tval = (int8_t)insn_get(s, OT_BYTE);
5055 goto do_jcc;
5056 case 0x180 ... 0x18f: /* jcc Jv */
5057 if (dflag) {
5058 tval = (int32_t)insn_get(s, OT_LONG);
5059 } else {
5060 tval = (int16_t)insn_get(s, OT_WORD);
5061 }
5062 do_jcc:
5063 next_eip = s->pc - s->cs_base;
5064 tval += next_eip;
5065 if (s->dflag == 0)
5066 tval &= 0xffff;
5067 gen_jcc(s, b, tval, next_eip);
5068 break;
5069
5070 case 0x190 ... 0x19f: /* setcc Gv */
5071 modrm = ldub_code(s->pc++);
5072 gen_setcc(s, b);
5073 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5074 break;
5075 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5076 ot = dflag + OT_WORD;
5077 modrm = ldub_code(s->pc++);
5078 reg = ((modrm >> 3) & 7) | rex_r;
5079 mod = (modrm >> 6) & 3;
5080 gen_setcc(s, b);
5081 if (mod != 3) {
5082 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5083 gen_op_ld_T1_A0[ot + s->mem_index]();
5084 } else {
5085 rm = (modrm & 7) | REX_B(s);
5086 gen_op_mov_TN_reg[ot][1][rm]();
5087 }
5088 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5089 break;
5090
5091 /************************/
5092 /* flags */
5093 case 0x9c: /* pushf */
5094#ifdef VBOX
5095 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5096#else
5097 if (s->vm86 && s->iopl != 3) {
5098#endif
5099 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5100 } else {
5101 if (s->cc_op != CC_OP_DYNAMIC)
5102 gen_op_set_cc_op(s->cc_op);
5103#ifdef VBOX
5104 if (s->vm86 && s->vme && s->iopl != 3)
5105 gen_op_movl_T0_eflags_vme();
5106 else
5107#endif
5108 gen_op_movl_T0_eflags();
5109 gen_push_T0(s);
5110 }
5111 break;
5112 case 0x9d: /* popf */
5113#ifdef VBOX
5114 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5115#else
5116 if (s->vm86 && s->iopl != 3) {
5117#endif
5118 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5119 } else {
5120 gen_pop_T0(s);
5121 if (s->cpl == 0) {
5122 if (s->dflag) {
5123 gen_op_movl_eflags_T0_cpl0();
5124 } else {
5125 gen_op_movw_eflags_T0_cpl0();
5126 }
5127 } else {
5128 if (s->cpl <= s->iopl) {
5129 if (s->dflag) {
5130 gen_op_movl_eflags_T0_io();
5131 } else {
5132 gen_op_movw_eflags_T0_io();
5133 }
5134 } else {
5135 if (s->dflag) {
5136 gen_op_movl_eflags_T0();
5137 } else {
5138#ifdef VBOX
5139 if (s->vm86 && s->vme)
5140 gen_op_movw_eflags_T0_vme();
5141 else
5142#endif
5143 gen_op_movw_eflags_T0();
5144 }
5145 }
5146 }
5147 gen_pop_update(s);
5148 s->cc_op = CC_OP_EFLAGS;
5149 /* abort translation because TF flag may change */
5150 gen_jmp_im(s->pc - s->cs_base);
5151 gen_eob(s);
5152 }
5153 break;
5154 case 0x9e: /* sahf */
5155 if (CODE64(s))
5156 goto illegal_op;
5157 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5158 if (s->cc_op != CC_OP_DYNAMIC)
5159 gen_op_set_cc_op(s->cc_op);
5160 gen_op_movb_eflags_T0();
5161 s->cc_op = CC_OP_EFLAGS;
5162 break;
5163 case 0x9f: /* lahf */
5164 if (CODE64(s))
5165 goto illegal_op;
5166 if (s->cc_op != CC_OP_DYNAMIC)
5167 gen_op_set_cc_op(s->cc_op);
5168 gen_op_movl_T0_eflags();
5169 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5170 break;
5171 case 0xf5: /* cmc */
5172 if (s->cc_op != CC_OP_DYNAMIC)
5173 gen_op_set_cc_op(s->cc_op);
5174 gen_op_cmc();
5175 s->cc_op = CC_OP_EFLAGS;
5176 break;
5177 case 0xf8: /* clc */
5178 if (s->cc_op != CC_OP_DYNAMIC)
5179 gen_op_set_cc_op(s->cc_op);
5180 gen_op_clc();
5181 s->cc_op = CC_OP_EFLAGS;
5182 break;
5183 case 0xf9: /* stc */
5184 if (s->cc_op != CC_OP_DYNAMIC)
5185 gen_op_set_cc_op(s->cc_op);
5186 gen_op_stc();
5187 s->cc_op = CC_OP_EFLAGS;
5188 break;
5189 case 0xfc: /* cld */
5190 gen_op_cld();
5191 break;
5192 case 0xfd: /* std */
5193 gen_op_std();
5194 break;
5195
5196 /************************/
5197 /* bit operations */
5198 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5199 ot = dflag + OT_WORD;
5200 modrm = ldub_code(s->pc++);
5201 op = (modrm >> 3) & 7;
5202 mod = (modrm >> 6) & 3;
5203 rm = (modrm & 7) | REX_B(s);
5204 if (mod != 3) {
5205 s->rip_offset = 1;
5206 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5207 gen_op_ld_T0_A0[ot + s->mem_index]();
5208 } else {
5209 gen_op_mov_TN_reg[ot][0][rm]();
5210 }
5211 /* load shift */
5212 val = ldub_code(s->pc++);
5213 gen_op_movl_T1_im(val);
5214 if (op < 4)
5215 goto illegal_op;
5216 op -= 4;
5217 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5218 s->cc_op = CC_OP_SARB + ot;
5219 if (op != 0) {
5220 if (mod != 3)
5221 gen_op_st_T0_A0[ot + s->mem_index]();
5222 else
5223 gen_op_mov_reg_T0[ot][rm]();
5224 gen_op_update_bt_cc();
5225 }
5226 break;
5227 case 0x1a3: /* bt Gv, Ev */
5228 op = 0;
5229 goto do_btx;
5230 case 0x1ab: /* bts */
5231 op = 1;
5232 goto do_btx;
5233 case 0x1b3: /* btr */
5234 op = 2;
5235 goto do_btx;
5236 case 0x1bb: /* btc */
5237 op = 3;
5238 do_btx:
5239 ot = dflag + OT_WORD;
5240 modrm = ldub_code(s->pc++);
5241 reg = ((modrm >> 3) & 7) | rex_r;
5242 mod = (modrm >> 6) & 3;
5243 rm = (modrm & 7) | REX_B(s);
5244 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5245 if (mod != 3) {
5246 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5247 /* specific case: we need to add a displacement */
5248 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5249 gen_op_ld_T0_A0[ot + s->mem_index]();
5250 } else {
5251 gen_op_mov_TN_reg[ot][0][rm]();
5252 }
5253 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5254 s->cc_op = CC_OP_SARB + ot;
5255 if (op != 0) {
5256 if (mod != 3)
5257 gen_op_st_T0_A0[ot + s->mem_index]();
5258 else
5259 gen_op_mov_reg_T0[ot][rm]();
5260 gen_op_update_bt_cc();
5261 }
5262 break;
5263 case 0x1bc: /* bsf */
5264 case 0x1bd: /* bsr */
5265 ot = dflag + OT_WORD;
5266 modrm = ldub_code(s->pc++);
5267 reg = ((modrm >> 3) & 7) | rex_r;
5268 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5269 /* NOTE: in order to handle the 0 case, we must load the
5270 result. It could be optimized with a generated jump */
5271 gen_op_mov_TN_reg[ot][1][reg]();
5272 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5273 gen_op_mov_reg_T1[ot][reg]();
5274 s->cc_op = CC_OP_LOGICB + ot;
5275 break;
5276 /************************/
5277 /* bcd */
5278 case 0x27: /* daa */
5279 if (CODE64(s))
5280 goto illegal_op;
5281 if (s->cc_op != CC_OP_DYNAMIC)
5282 gen_op_set_cc_op(s->cc_op);
5283 gen_op_daa();
5284 s->cc_op = CC_OP_EFLAGS;
5285 break;
5286 case 0x2f: /* das */
5287 if (CODE64(s))
5288 goto illegal_op;
5289 if (s->cc_op != CC_OP_DYNAMIC)
5290 gen_op_set_cc_op(s->cc_op);
5291 gen_op_das();
5292 s->cc_op = CC_OP_EFLAGS;
5293 break;
5294 case 0x37: /* aaa */
5295 if (CODE64(s))
5296 goto illegal_op;
5297 if (s->cc_op != CC_OP_DYNAMIC)
5298 gen_op_set_cc_op(s->cc_op);
5299 gen_op_aaa();
5300 s->cc_op = CC_OP_EFLAGS;
5301 break;
5302 case 0x3f: /* aas */
5303 if (CODE64(s))
5304 goto illegal_op;
5305 if (s->cc_op != CC_OP_DYNAMIC)
5306 gen_op_set_cc_op(s->cc_op);
5307 gen_op_aas();
5308 s->cc_op = CC_OP_EFLAGS;
5309 break;
5310 case 0xd4: /* aam */
5311 if (CODE64(s))
5312 goto illegal_op;
5313 val = ldub_code(s->pc++);
5314 gen_op_aam(val);
5315 s->cc_op = CC_OP_LOGICB;
5316 break;
5317 case 0xd5: /* aad */
5318 if (CODE64(s))
5319 goto illegal_op;
5320 val = ldub_code(s->pc++);
5321 gen_op_aad(val);
5322 s->cc_op = CC_OP_LOGICB;
5323 break;
5324 /************************/
5325 /* misc */
5326 case 0x90: /* nop */
5327 /* XXX: xchg + rex handling */
5328 /* XXX: correct lock test for all insn */
5329 if (prefixes & PREFIX_LOCK)
5330 goto illegal_op;
5331 break;
5332 case 0x9b: /* fwait */
5333 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5334 (HF_MP_MASK | HF_TS_MASK)) {
5335 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5336 } else {
5337 if (s->cc_op != CC_OP_DYNAMIC)
5338 gen_op_set_cc_op(s->cc_op);
5339 gen_jmp_im(pc_start - s->cs_base);
5340 gen_op_fwait();
5341 }
5342 break;
5343 case 0xcc: /* int3 */
5344#ifdef VBOX
5345 if (s->vm86 && s->iopl != 3 && !s->vme) {
5346 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5347 }
5348 else
5349#endif
5350 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5351 break;
5352 case 0xcd: /* int N */
5353 val = ldub_code(s->pc++);
5354#ifdef VBOX
5355 if (s->vm86 && s->iopl != 3 && !s->vme) {
5356#else
5357 if (s->vm86 && s->iopl != 3) {
5358#endif
5359 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5360 } else {
5361 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5362 }
5363 break;
5364 case 0xce: /* into */
5365 if (CODE64(s))
5366 goto illegal_op;
5367 if (s->cc_op != CC_OP_DYNAMIC)
5368 gen_op_set_cc_op(s->cc_op);
5369 gen_jmp_im(pc_start - s->cs_base);
5370 gen_op_into(s->pc - pc_start);
5371 break;
5372 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5373 gen_debug(s, pc_start - s->cs_base);
5374 break;
5375 case 0xfa: /* cli */
5376 if (!s->vm86) {
5377 if (s->cpl <= s->iopl) {
5378 gen_op_cli();
5379 } else {
5380 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5381 }
5382 } else {
5383 if (s->iopl == 3) {
5384 gen_op_cli();
5385#ifdef VBOX
5386 } else
5387 if (s->iopl != 3 && s->vme) {
5388 gen_op_cli_vme();
5389#endif
5390 } else {
5391 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5392 }
5393 }
5394 break;
5395 case 0xfb: /* sti */
5396 if (!s->vm86) {
5397 if (s->cpl <= s->iopl) {
5398 gen_sti:
5399 gen_op_sti();
5400 /* interruptions are enabled only the first insn after sti */
5401 /* If several instructions disable interrupts, only the
5402 _first_ does it */
5403 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5404 gen_op_set_inhibit_irq();
5405 /* give a chance to handle pending irqs */
5406 gen_jmp_im(s->pc - s->cs_base);
5407 gen_eob(s);
5408 } else {
5409 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5410 }
5411 } else {
5412 if (s->iopl == 3) {
5413 goto gen_sti;
5414#ifdef VBOX
5415 } else
5416 if (s->iopl != 3 && s->vme) {
5417 gen_op_sti_vme();
5418 /* give a chance to handle pending irqs */
5419 gen_jmp_im(s->pc - s->cs_base);
5420 gen_eob(s);
5421#endif
5422 } else {
5423 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5424 }
5425 }
5426 break;
5427 case 0x62: /* bound */
5428 if (CODE64(s))
5429 goto illegal_op;
5430 ot = dflag ? OT_LONG : OT_WORD;
5431 modrm = ldub_code(s->pc++);
5432 reg = (modrm >> 3) & 7;
5433 mod = (modrm >> 6) & 3;
5434 if (mod == 3)
5435 goto illegal_op;
5436 gen_op_mov_TN_reg[ot][0][reg]();
5437 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5438 gen_jmp_im(pc_start - s->cs_base);
5439 if (ot == OT_WORD)
5440 gen_op_boundw();
5441 else
5442 gen_op_boundl();
5443 break;
5444 case 0x1c8 ... 0x1cf: /* bswap reg */
5445 reg = (b & 7) | REX_B(s);
5446#ifdef TARGET_X86_64
5447 if (dflag == 2) {
5448 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5449 gen_op_bswapq_T0();
5450 gen_op_mov_reg_T0[OT_QUAD][reg]();
5451 } else
5452#endif
5453 {
5454 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5455 gen_op_bswapl_T0();
5456 gen_op_mov_reg_T0[OT_LONG][reg]();
5457 }
5458 break;
5459 case 0xd6: /* salc */
5460 if (CODE64(s))
5461 goto illegal_op;
5462 if (s->cc_op != CC_OP_DYNAMIC)
5463 gen_op_set_cc_op(s->cc_op);
5464 gen_op_salc();
5465 break;
5466 case 0xe0: /* loopnz */
5467 case 0xe1: /* loopz */
5468 if (s->cc_op != CC_OP_DYNAMIC)
5469 gen_op_set_cc_op(s->cc_op);
5470 /* FALL THRU */
5471 case 0xe2: /* loop */
5472 case 0xe3: /* jecxz */
5473 {
5474 int l1, l2;
5475
5476 tval = (int8_t)insn_get(s, OT_BYTE);
5477 next_eip = s->pc - s->cs_base;
5478 tval += next_eip;
5479 if (s->dflag == 0)
5480 tval &= 0xffff;
5481
5482 l1 = gen_new_label();
5483 l2 = gen_new_label();
5484 b &= 3;
5485 if (b == 3) {
5486 gen_op_jz_ecx[s->aflag](l1);
5487 } else {
5488 gen_op_dec_ECX[s->aflag]();
5489 if (b <= 1)
5490 gen_op_mov_T0_cc();
5491 gen_op_loop[s->aflag][b](l1);
5492 }
5493
5494 gen_jmp_im(next_eip);
5495 gen_op_jmp_label(l2);
5496 gen_set_label(l1);
5497 gen_jmp_im(tval);
5498 gen_set_label(l2);
5499 gen_eob(s);
5500 }
5501 break;
5502 case 0x130: /* wrmsr */
5503 case 0x132: /* rdmsr */
5504 if (s->cpl != 0) {
5505 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5506 } else {
5507 if (b & 2)
5508 gen_op_rdmsr();
5509 else
5510 gen_op_wrmsr();
5511 }
5512 break;
5513 case 0x131: /* rdtsc */
5514 gen_jmp_im(pc_start - s->cs_base);
5515 gen_op_rdtsc();
5516 break;
5517 case 0x134: /* sysenter */
5518 if (CODE64(s))
5519 goto illegal_op;
5520 if (!s->pe) {
5521 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5522 } else {
5523 if (s->cc_op != CC_OP_DYNAMIC) {
5524 gen_op_set_cc_op(s->cc_op);
5525 s->cc_op = CC_OP_DYNAMIC;
5526 }
5527 gen_jmp_im(pc_start - s->cs_base);
5528 gen_op_sysenter();
5529 gen_eob(s);
5530 }
5531 break;
5532 case 0x135: /* sysexit */
5533 if (CODE64(s))
5534 goto illegal_op;
5535 if (!s->pe) {
5536 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5537 } else {
5538 if (s->cc_op != CC_OP_DYNAMIC) {
5539 gen_op_set_cc_op(s->cc_op);
5540 s->cc_op = CC_OP_DYNAMIC;
5541 }
5542 gen_jmp_im(pc_start - s->cs_base);
5543 gen_op_sysexit();
5544 gen_eob(s);
5545 }
5546 break;
5547#ifdef TARGET_X86_64
5548 case 0x105: /* syscall */
5549 /* XXX: is it usable in real mode ? */
5550 if (s->cc_op != CC_OP_DYNAMIC) {
5551 gen_op_set_cc_op(s->cc_op);
5552 s->cc_op = CC_OP_DYNAMIC;
5553 }
5554 gen_jmp_im(pc_start - s->cs_base);
5555 gen_op_syscall(s->pc - pc_start);
5556 gen_eob(s);
5557 break;
5558 case 0x107: /* sysret */
5559 if (!s->pe) {
5560 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5561 } else {
5562 if (s->cc_op != CC_OP_DYNAMIC) {
5563 gen_op_set_cc_op(s->cc_op);
5564 s->cc_op = CC_OP_DYNAMIC;
5565 }
5566 gen_jmp_im(pc_start - s->cs_base);
5567 gen_op_sysret(s->dflag);
5568 /* condition codes are modified only in long mode */
5569 if (s->lma)
5570 s->cc_op = CC_OP_EFLAGS;
5571 gen_eob(s);
5572 }
5573 break;
5574#endif
5575 case 0x1a2: /* cpuid */
5576 gen_op_cpuid();
5577 break;
5578 case 0xf4: /* hlt */
5579 if (s->cpl != 0) {
5580 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5581 } else {
5582 if (s->cc_op != CC_OP_DYNAMIC)
5583 gen_op_set_cc_op(s->cc_op);
5584 gen_jmp_im(s->pc - s->cs_base);
5585 gen_op_hlt();
5586 s->is_jmp = 3;
5587 }
5588 break;
5589 case 0x100:
5590 modrm = ldub_code(s->pc++);
5591 mod = (modrm >> 6) & 3;
5592 op = (modrm >> 3) & 7;
5593 switch(op) {
5594 case 0: /* sldt */
5595 if (!s->pe || s->vm86)
5596 goto illegal_op;
5597 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5598 ot = OT_WORD;
5599 if (mod == 3)
5600 ot += s->dflag;
5601 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5602 break;
5603 case 2: /* lldt */
5604 if (!s->pe || s->vm86)
5605 goto illegal_op;
5606 if (s->cpl != 0) {
5607 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5608 } else {
5609 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5610 gen_jmp_im(pc_start - s->cs_base);
5611 gen_op_lldt_T0();
5612 }
5613 break;
5614 case 1: /* str */
5615 if (!s->pe || s->vm86)
5616 goto illegal_op;
5617 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5618 ot = OT_WORD;
5619 if (mod == 3)
5620 ot += s->dflag;
5621 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5622 break;
5623 case 3: /* ltr */
5624 if (!s->pe || s->vm86)
5625 goto illegal_op;
5626 if (s->cpl != 0) {
5627 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5628 } else {
5629 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5630 gen_jmp_im(pc_start - s->cs_base);
5631 gen_op_ltr_T0();
5632 }
5633 break;
5634 case 4: /* verr */
5635 case 5: /* verw */
5636 if (!s->pe || s->vm86)
5637 goto illegal_op;
5638 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5639 if (s->cc_op != CC_OP_DYNAMIC)
5640 gen_op_set_cc_op(s->cc_op);
5641 if (op == 4)
5642 gen_op_verr();
5643 else
5644 gen_op_verw();
5645 s->cc_op = CC_OP_EFLAGS;
5646 break;
5647 default:
5648 goto illegal_op;
5649 }
5650 break;
5651 case 0x101:
5652 modrm = ldub_code(s->pc++);
5653 mod = (modrm >> 6) & 3;
5654 op = (modrm >> 3) & 7;
5655 rm = modrm & 7;
5656 switch(op) {
5657 case 0: /* sgdt */
5658 if (mod == 3)
5659 goto illegal_op;
5660 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5661 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5662 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5663 gen_add_A0_im(s, 2);
5664 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5665 if (!s->dflag)
5666 gen_op_andl_T0_im(0xffffff);
5667 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5668 break;
5669 case 1:
5670 if (mod == 3) {
5671 switch (rm) {
5672 case 0: /* monitor */
5673 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5674 s->cpl != 0)
5675 goto illegal_op;
5676 gen_jmp_im(pc_start - s->cs_base);
5677#ifdef TARGET_X86_64
5678 if (s->aflag == 2) {
5679 gen_op_movq_A0_reg[R_EBX]();
5680 gen_op_addq_A0_AL();
5681 } else
5682#endif
5683 {
5684 gen_op_movl_A0_reg[R_EBX]();
5685 gen_op_addl_A0_AL();
5686 if (s->aflag == 0)
5687 gen_op_andl_A0_ffff();
5688 }
5689 gen_add_A0_ds_seg(s);
5690 gen_op_monitor();
5691 break;
5692 case 1: /* mwait */
5693 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5694 s->cpl != 0)
5695 goto illegal_op;
5696 if (s->cc_op != CC_OP_DYNAMIC) {
5697 gen_op_set_cc_op(s->cc_op);
5698 s->cc_op = CC_OP_DYNAMIC;
5699 }
5700 gen_jmp_im(s->pc - s->cs_base);
5701 gen_op_mwait();
5702 gen_eob(s);
5703 break;
5704 default:
5705 goto illegal_op;
5706 }
5707 } else { /* sidt */
5708 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5709 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5710 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5711 gen_add_A0_im(s, 2);
5712 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5713 if (!s->dflag)
5714 gen_op_andl_T0_im(0xffffff);
5715 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5716 }
5717 break;
5718 case 2: /* lgdt */
5719 case 3: /* lidt */
5720 if (mod == 3)
5721 goto illegal_op;
5722 if (s->cpl != 0) {
5723 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5724 } else {
5725 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5726 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5727 gen_add_A0_im(s, 2);
5728 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5729 if (!s->dflag)
5730 gen_op_andl_T0_im(0xffffff);
5731 if (op == 2) {
5732 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5733 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5734 } else {
5735 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5736 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5737 }
5738 }
5739 break;
5740 case 4: /* smsw */
5741 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5742 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5743 break;
5744 case 6: /* lmsw */
5745 if (s->cpl != 0) {
5746 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5747 } else {
5748 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5749 gen_op_lmsw_T0();
5750 gen_jmp_im(s->pc - s->cs_base);
5751 gen_eob(s);
5752 }
5753 break;
5754 case 7: /* invlpg */
5755 if (s->cpl != 0) {
5756 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5757 } else {
5758 if (mod == 3) {
5759#ifdef TARGET_X86_64
5760 if (CODE64(s) && (modrm & 7) == 0) {
5761 /* swapgs */
5762 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5763 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5764 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5765 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5766 } else
5767#endif
5768 {
5769 goto illegal_op;
5770 }
5771 } else {
5772 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5773 gen_op_invlpg_A0();
5774 gen_jmp_im(s->pc - s->cs_base);
5775 gen_eob(s);
5776 }
5777 }
5778 break;
5779 default:
5780 goto illegal_op;
5781 }
5782 break;
5783 case 0x108: /* invd */
5784 case 0x109: /* wbinvd */
5785 if (s->cpl != 0) {
5786 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5787 } else {
5788 /* nothing to do */
5789 }
5790 break;
5791 case 0x63: /* arpl or movslS (x86_64) */
5792#ifdef TARGET_X86_64
5793 if (CODE64(s)) {
5794 int d_ot;
5795 /* d_ot is the size of destination */
5796 d_ot = dflag + OT_WORD;
5797
5798 modrm = ldub_code(s->pc++);
5799 reg = ((modrm >> 3) & 7) | rex_r;
5800 mod = (modrm >> 6) & 3;
5801 rm = (modrm & 7) | REX_B(s);
5802
5803 if (mod == 3) {
5804 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5805 /* sign extend */
5806 if (d_ot == OT_QUAD)
5807 gen_op_movslq_T0_T0();
5808 gen_op_mov_reg_T0[d_ot][reg]();
5809 } else {
5810 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5811 if (d_ot == OT_QUAD) {
5812 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5813 } else {
5814 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5815 }
5816 gen_op_mov_reg_T0[d_ot][reg]();
5817 }
5818 } else
5819#endif
5820 {
5821 if (!s->pe || s->vm86)
5822 goto illegal_op;
5823 ot = dflag ? OT_LONG : OT_WORD;
5824 modrm = ldub_code(s->pc++);
5825 reg = (modrm >> 3) & 7;
5826 mod = (modrm >> 6) & 3;
5827 rm = modrm & 7;
5828 if (mod != 3) {
5829 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5830 gen_op_ld_T0_A0[ot + s->mem_index]();
5831 } else {
5832 gen_op_mov_TN_reg[ot][0][rm]();
5833 }
5834 if (s->cc_op != CC_OP_DYNAMIC)
5835 gen_op_set_cc_op(s->cc_op);
5836 gen_op_arpl();
5837 s->cc_op = CC_OP_EFLAGS;
5838 if (mod != 3) {
5839 gen_op_st_T0_A0[ot + s->mem_index]();
5840 } else {
5841 gen_op_mov_reg_T0[ot][rm]();
5842 }
5843 gen_op_arpl_update();
5844 }
5845 break;
5846 case 0x102: /* lar */
5847 case 0x103: /* lsl */
5848 if (!s->pe || s->vm86)
5849 goto illegal_op;
5850 ot = dflag ? OT_LONG : OT_WORD;
5851 modrm = ldub_code(s->pc++);
5852 reg = ((modrm >> 3) & 7) | rex_r;
5853 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5854 gen_op_mov_TN_reg[ot][1][reg]();
5855 if (s->cc_op != CC_OP_DYNAMIC)
5856 gen_op_set_cc_op(s->cc_op);
5857 if (b == 0x102)
5858 gen_op_lar();
5859 else
5860 gen_op_lsl();
5861 s->cc_op = CC_OP_EFLAGS;
5862 gen_op_mov_reg_T1[ot][reg]();
5863 break;
5864 case 0x118:
5865 modrm = ldub_code(s->pc++);
5866 mod = (modrm >> 6) & 3;
5867 op = (modrm >> 3) & 7;
5868 switch(op) {
5869 case 0: /* prefetchnta */
5870 case 1: /* prefetchnt0 */
5871 case 2: /* prefetchnt0 */
5872 case 3: /* prefetchnt0 */
5873 if (mod == 3)
5874 goto illegal_op;
5875 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5876 /* nothing more to do */
5877 break;
5878 default:
5879 goto illegal_op;
5880 }
5881 break;
5882 case 0x120: /* mov reg, crN */
5883 case 0x122: /* mov crN, reg */
5884 if (s->cpl != 0) {
5885 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5886 } else {
5887 modrm = ldub_code(s->pc++);
5888 if ((modrm & 0xc0) != 0xc0)
5889 goto illegal_op;
5890 rm = (modrm & 7) | REX_B(s);
5891 reg = ((modrm >> 3) & 7) | rex_r;
5892 if (CODE64(s))
5893 ot = OT_QUAD;
5894 else
5895 ot = OT_LONG;
5896 switch(reg) {
5897 case 0:
5898 case 2:
5899 case 3:
5900 case 4:
5901 case 8:
5902 if (b & 2) {
5903 gen_op_mov_TN_reg[ot][0][rm]();
5904 gen_op_movl_crN_T0(reg);
5905 gen_jmp_im(s->pc - s->cs_base);
5906 gen_eob(s);
5907 } else {
5908#if !defined(CONFIG_USER_ONLY)
5909 if (reg == 8)
5910 gen_op_movtl_T0_cr8();
5911 else
5912#endif
5913 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5914 gen_op_mov_reg_T0[ot][rm]();
5915 }
5916 break;
5917 default:
5918 goto illegal_op;
5919 }
5920 }
5921 break;
5922 case 0x121: /* mov reg, drN */
5923 case 0x123: /* mov drN, reg */
5924 if (s->cpl != 0) {
5925 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5926 } else {
5927 modrm = ldub_code(s->pc++);
5928 if ((modrm & 0xc0) != 0xc0)
5929 goto illegal_op;
5930 rm = (modrm & 7) | REX_B(s);
5931 reg = ((modrm >> 3) & 7) | rex_r;
5932 if (CODE64(s))
5933 ot = OT_QUAD;
5934 else
5935 ot = OT_LONG;
5936 /* XXX: do it dynamically with CR4.DE bit */
5937 if (reg == 4 || reg == 5 || reg >= 8)
5938 goto illegal_op;
5939 if (b & 2) {
5940 gen_op_mov_TN_reg[ot][0][rm]();
5941 gen_op_movl_drN_T0(reg);
5942 gen_jmp_im(s->pc - s->cs_base);
5943 gen_eob(s);
5944 } else {
5945 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5946 gen_op_mov_reg_T0[ot][rm]();
5947 }
5948 }
5949 break;
5950 case 0x106: /* clts */
5951 if (s->cpl != 0) {
5952 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5953 } else {
5954 gen_op_clts();
5955 /* abort block because static cpu state changed */
5956 gen_jmp_im(s->pc - s->cs_base);
5957 gen_eob(s);
5958 }
5959 break;
5960 /* MMX/SSE/SSE2/PNI support */
5961 case 0x1c3: /* MOVNTI reg, mem */
5962 if (!(s->cpuid_features & CPUID_SSE2))
5963 goto illegal_op;
5964 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5965 modrm = ldub_code(s->pc++);
5966 mod = (modrm >> 6) & 3;
5967 if (mod == 3)
5968 goto illegal_op;
5969 reg = ((modrm >> 3) & 7) | rex_r;
5970 /* generate a generic store */
5971 gen_ldst_modrm(s, modrm, ot, reg, 1);
5972 break;
5973 case 0x1ae:
5974 modrm = ldub_code(s->pc++);
5975 mod = (modrm >> 6) & 3;
5976 op = (modrm >> 3) & 7;
5977 switch(op) {
5978 case 0: /* fxsave */
5979 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5980 (s->flags & HF_EM_MASK))
5981 goto illegal_op;
5982 if (s->flags & HF_TS_MASK) {
5983 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5984 break;
5985 }
5986 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5987 gen_op_fxsave_A0((s->dflag == 2));
5988 break;
5989 case 1: /* fxrstor */
5990 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
5991 (s->flags & HF_EM_MASK))
5992 goto illegal_op;
5993 if (s->flags & HF_TS_MASK) {
5994 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5995 break;
5996 }
5997 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5998 gen_op_fxrstor_A0((s->dflag == 2));
5999 break;
6000 case 2: /* ldmxcsr */
6001 case 3: /* stmxcsr */
6002 if (s->flags & HF_TS_MASK) {
6003 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6004 break;
6005 }
6006 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6007 mod == 3)
6008 goto illegal_op;
6009 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6010 if (op == 2) {
6011 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6012 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6013 } else {
6014 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6015 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6016 }
6017 break;
6018 case 5: /* lfence */
6019 case 6: /* mfence */
6020 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6021 goto illegal_op;
6022 break;
6023 case 7: /* sfence / clflush */
6024 if ((modrm & 0xc7) == 0xc0) {
6025 /* sfence */
6026 if (!(s->cpuid_features & CPUID_SSE))
6027 goto illegal_op;
6028 } else {
6029 /* clflush */
6030 if (!(s->cpuid_features & CPUID_CLFLUSH))
6031 goto illegal_op;
6032 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6033 }
6034 break;
6035 default:
6036 goto illegal_op;
6037 }
6038 break;
6039 case 0x10d: /* prefetch */
6040 modrm = ldub_code(s->pc++);
6041 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6042 /* ignore for now */
6043 break;
6044 case 0x110 ... 0x117:
6045 case 0x128 ... 0x12f:
6046 case 0x150 ... 0x177:
6047 case 0x17c ... 0x17f:
6048 case 0x1c2:
6049 case 0x1c4 ... 0x1c6:
6050 case 0x1d0 ... 0x1fe:
6051 gen_sse(s, b, pc_start, rex_r);
6052 break;
6053 default:
6054 goto illegal_op;
6055 }
6056 /* lock generation */
6057 if (s->prefix & PREFIX_LOCK)
6058 gen_op_unlock();
6059 return s->pc;
6060 illegal_op:
6061 if (s->prefix & PREFIX_LOCK)
6062 gen_op_unlock();
6063 /* XXX: ensure that no lock was generated */
6064 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6065 return s->pc;
6066}
6067
6068#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6069#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6070
/* Flags read by an operation.
 *
 * Indexed by micro-op number (INDEX_op_*); each entry is a mask of the
 * EFLAGS condition-code bits (CC_O, CC_S, CC_Z, CC_A, CC_P, CC_C) that
 * the op consumes as input.  Ops without an explicit entry default to 0,
 * i.e. they read no flags.  NOTE(review): this table presumably drives
 * the translator's lazy-flags optimisation (deciding when a pending
 * cc_op computation must be materialised) -- the consumer is outside
 * this chunk; confirm against the optimiser code. */
static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops consume the auxiliary (and for das/daa the carry)
       flag from the previous arithmetic op. */
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* Conditional jumps on a pending sub result: each reads exactly the
       bits its condition is defined on (b=C, z=Z, be=Z|C, s=S,
       l=O^S, le=O^S|Z). */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* loopz/loopnz test ZF in addition to ECX. */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc variants reading the dynamically computed flags. */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc variants evaluated directly on a pending sub result. */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    /* pushf/lahf-style read of the whole arithmetic flag set. */
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* 64-bit (quad) counterparts of the entries above. */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb need the incoming carry, and rcl/rcr rotate through it, so
   every memory-access variant of those ops reads CC_C.  SUFFIX selects
   the access flavour ("", _raw, _kernel, _user) -- presumably direct
   vs. soft-MMU kernel/user accesses; confirm against the op
   definitions. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6203
/* flags written by an operation */
/* For each micro-op index, the set of EFLAGS condition-code bits
   (CC_O/S/Z/A/P/C) the op (re)defines.  optimize_flags() walks the op
   stream backwards and uses this table to kill flags that are
   overwritten before being read.  Ops absent from the initializer
   default to 0 (write no flags). */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* SAHF writes only the low-byte flags; the other eflags moves
       load the full status-flag set */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Memory-variant ops exist in several flavors (direct, _raw, _kernel,
   _user); this macro stamps out identical write-flag entries for each
   address-space suffix. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6354
/* simpler form of an operation if no flags need to be generated */
/* Maps a flag-producing micro-op to an equivalent op that skips the
   flag computation.  Entries left at 0 are turned into identity
   mappings by optimize_flags_init(), so optimize_flags() can index
   this table unconditionally. */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops become no-ops when the flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: the *_cc variants have flag-free twins */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotates, stamped out per address-space suffix (see DEF_WRITEF) */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6398
6399void optimize_flags_init(void)
6400{
6401 int i;
6402 /* put default values in arrays */
6403 for(i = 0; i < NB_OPS; i++) {
6404 if (opc_simpler[i] == 0)
6405 opc_simpler[i] = i;
6406 }
6407}
6408
6409/* CPU flags computation optimization: we move backward thru the
6410 generated code to see which flags are needed. The operation is
6411 modified if suitable */
6412static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6413{
6414 uint16_t *opc_ptr;
6415 int live_flags, write_flags, op;
6416
6417 opc_ptr = opc_buf + opc_buf_len;
6418 /* live_flags contains the flags needed by the next instructions
6419 in the code. At the end of the bloc, we consider that all the
6420 flags are live. */
6421 live_flags = CC_OSZAPC;
6422 while (opc_ptr > opc_buf) {
6423 op = *--opc_ptr;
6424 /* if none of the flags written by the instruction is used,
6425 then we can try to find a simpler instruction */
6426 write_flags = opc_write_flags[op];
6427 if ((live_flags & write_flags) == 0) {
6428 *opc_ptr = opc_simpler[op];
6429 }
6430 /* compute the live flags before the instruction */
6431 live_flags &= ~write_flags;
6432 live_flags |= opc_read_flags[op];
6433 }
6434}
6435
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to recover the
   guest PC after a fault inside the translated code).
   Returns 0 on success. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the TB flags word into the disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX
    /* VirtualBox addition: remember whether CR4.VME (v86-mode
       extensions) is enabled for this guest */
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions (index into the per-privilege
       mem-access function tables) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when nothing forces a
       stop after every instruction */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;   /* index of the last op with recorded PC info */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for every generated op so a
               fault can be mapped back to the faulting instruction */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VirtualBox addition: single-instruction emulation mode —
           end the TB after this one instruction and clear the
           request flag */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too (also caps a
           TB to stay within one guest page, minus slack) */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* in search_pc mode the caller already knows the TB size */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6620
6621int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6622{
6623 return gen_intermediate_code_internal(env, tb, 0);
6624}
6625
6626int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6627{
6628 return gen_intermediate_code_internal(env, tb, 1);
6629}
6630
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette