VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 18661

最後變更 在這個檔案從18661是 15861,由 vboxsync 提交於 16 年 前

REM: put VME bit update in place

  • 屬性 svn:eol-style 設為 native
檔案大小: 206.0 KB
 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#include <inttypes.h>
34#ifndef VBOX
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42
43/* XXX: move that elsewhere */
44static uint16_t *gen_opc_ptr;
45static uint32_t *gen_opparam_ptr;
46
47#define PREFIX_REPZ 0x01
48#define PREFIX_REPNZ 0x02
49#define PREFIX_LOCK 0x04
50#define PREFIX_DATA 0x08
51#define PREFIX_ADR 0x10
52
53#ifdef TARGET_X86_64
54#define X86_64_ONLY(x) x
55#define X86_64_DEF(x...) x
56#define CODE64(s) ((s)->code64)
57#define REX_X(s) ((s)->rex_x)
58#define REX_B(s) ((s)->rex_b)
59/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
60#if 1
61#define BUGGY_64(x) NULL
62#endif
63#else
64#define X86_64_ONLY(x) NULL
65#define X86_64_DEF(x...)
66#define CODE64(s) 0
67#define REX_X(s) 0
68#define REX_B(s) 0
69#endif
70
71#ifdef TARGET_X86_64
72static int x86_64_hregs;
73#endif
74
75#ifdef USE_DIRECT_JUMP
76#define TBPARAM(x)
77#else
78#define TBPARAM(x) (long)(x)
79#endif
80
81#ifdef VBOX
82/* Special/override code readers to hide patched code. */
83
84uint8_t ldub_code_raw(target_ulong pc)
85{
86 uint8_t b;
87
88 if (!remR3GetOpcode(cpu_single_env, pc, &b))
89 b = ldub_code(pc);
90 return b;
91}
92#define ldub_code(a) ldub_code_raw(a)
93
94uint16_t lduw_code_raw(target_ulong pc)
95{
96 return (ldub_code(pc+1) << 8) | ldub_code(pc);
97}
98#define lduw_code(a) lduw_code_raw(a)
99
100
101uint32_t ldl_code_raw(target_ulong pc)
102{
103 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
104}
105#define ldl_code(a) ldl_code_raw(a)
106
107#endif /* VBOX */
108
109
/* Translator state for the x86 block currently being translated.
   One instance lives for the duration of one translation block; none
   of this state persists across blocks. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index; -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current instruction */
    int aflag, dflag; /* address/operand size: 0 = 16 bit, 1 = 32 bit, 2 = 64 bit */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base register extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (lazy condition-code tracking) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;         /* CR4.VME */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (raw/kernel/user) */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* guest CPUID feature bits */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
151
152static void gen_eob(DisasContext *s);
153static void gen_jmp(DisasContext *s, target_ulong eip);
154static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
155
156/* i386 arith/logic operations */
157enum {
158 OP_ADDL,
159 OP_ORL,
160 OP_ADCL,
161 OP_SBBL,
162 OP_ANDL,
163 OP_SUBL,
164 OP_XORL,
165 OP_CMPL,
166};
167
168/* i386 shift ops */
169enum {
170 OP_ROL,
171 OP_ROR,
172 OP_RCL,
173 OP_RCR,
174 OP_SHL,
175 OP_SHR,
176 OP_SHL1, /* undocumented */
177 OP_SAR = 7,
178};
179
180enum {
181#define DEF(s, n, copy_size) INDEX_op_ ## s,
182#include "opc.h"
183#undef DEF
184 NB_OPS,
185};
186
187#include "gen-op.h"
188
189/* operand size */
190enum {
191 OT_BYTE = 0,
192 OT_WORD,
193 OT_LONG,
194 OT_QUAD,
195};
196
197enum {
198 /* I386 int registers */
199 OR_EAX, /* MUST be even numbered */
200 OR_ECX,
201 OR_EDX,
202 OR_EBX,
203 OR_ESP,
204 OR_EBP,
205 OR_ESI,
206 OR_EDI,
207
208 OR_TMP0 = 16, /* temporary operand register */
209 OR_TMP1,
210 OR_A0, /* temporary register used when doing address evaluation */
211};
212
213#ifdef TARGET_X86_64
214
215#define NB_OP_SIZES 4
216
217#define DEF_REGS(prefix, suffix) \
218 prefix ## EAX ## suffix,\
219 prefix ## ECX ## suffix,\
220 prefix ## EDX ## suffix,\
221 prefix ## EBX ## suffix,\
222 prefix ## ESP ## suffix,\
223 prefix ## EBP ## suffix,\
224 prefix ## ESI ## suffix,\
225 prefix ## EDI ## suffix,\
226 prefix ## R8 ## suffix,\
227 prefix ## R9 ## suffix,\
228 prefix ## R10 ## suffix,\
229 prefix ## R11 ## suffix,\
230 prefix ## R12 ## suffix,\
231 prefix ## R13 ## suffix,\
232 prefix ## R14 ## suffix,\
233 prefix ## R15 ## suffix,
234
235#define DEF_BREGS(prefixb, prefixh, suffix) \
236 \
237static void prefixb ## ESP ## suffix ## _wrapper(void) \
238{ \
239 if (x86_64_hregs) \
240 prefixb ## ESP ## suffix (); \
241 else \
242 prefixh ## EAX ## suffix (); \
243} \
244 \
245static void prefixb ## EBP ## suffix ## _wrapper(void) \
246{ \
247 if (x86_64_hregs) \
248 prefixb ## EBP ## suffix (); \
249 else \
250 prefixh ## ECX ## suffix (); \
251} \
252 \
253static void prefixb ## ESI ## suffix ## _wrapper(void) \
254{ \
255 if (x86_64_hregs) \
256 prefixb ## ESI ## suffix (); \
257 else \
258 prefixh ## EDX ## suffix (); \
259} \
260 \
261static void prefixb ## EDI ## suffix ## _wrapper(void) \
262{ \
263 if (x86_64_hregs) \
264 prefixb ## EDI ## suffix (); \
265 else \
266 prefixh ## EBX ## suffix (); \
267}
268
269DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
270DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
271DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
272DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
273
274#else /* !TARGET_X86_64 */
275
276#define NB_OP_SIZES 3
277
278#define DEF_REGS(prefix, suffix) \
279 prefix ## EAX ## suffix,\
280 prefix ## ECX ## suffix,\
281 prefix ## EDX ## suffix,\
282 prefix ## EBX ## suffix,\
283 prefix ## ESP ## suffix,\
284 prefix ## EBP ## suffix,\
285 prefix ## ESI ## suffix,\
286 prefix ## EDI ## suffix,
287
288#endif /* !TARGET_X86_64 */
289
290static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
291 [OT_BYTE] = {
292 gen_op_movb_EAX_T0,
293 gen_op_movb_ECX_T0,
294 gen_op_movb_EDX_T0,
295 gen_op_movb_EBX_T0,
296#ifdef TARGET_X86_64
297 gen_op_movb_ESP_T0_wrapper,
298 gen_op_movb_EBP_T0_wrapper,
299 gen_op_movb_ESI_T0_wrapper,
300 gen_op_movb_EDI_T0_wrapper,
301 gen_op_movb_R8_T0,
302 gen_op_movb_R9_T0,
303 gen_op_movb_R10_T0,
304 gen_op_movb_R11_T0,
305 gen_op_movb_R12_T0,
306 gen_op_movb_R13_T0,
307 gen_op_movb_R14_T0,
308 gen_op_movb_R15_T0,
309#else
310 gen_op_movh_EAX_T0,
311 gen_op_movh_ECX_T0,
312 gen_op_movh_EDX_T0,
313 gen_op_movh_EBX_T0,
314#endif
315 },
316 [OT_WORD] = {
317 DEF_REGS(gen_op_movw_, _T0)
318 },
319 [OT_LONG] = {
320 DEF_REGS(gen_op_movl_, _T0)
321 },
322#ifdef TARGET_X86_64
323 [OT_QUAD] = {
324 DEF_REGS(gen_op_movq_, _T0)
325 },
326#endif
327};
328
329static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
330 [OT_BYTE] = {
331 gen_op_movb_EAX_T1,
332 gen_op_movb_ECX_T1,
333 gen_op_movb_EDX_T1,
334 gen_op_movb_EBX_T1,
335#ifdef TARGET_X86_64
336 gen_op_movb_ESP_T1_wrapper,
337 gen_op_movb_EBP_T1_wrapper,
338 gen_op_movb_ESI_T1_wrapper,
339 gen_op_movb_EDI_T1_wrapper,
340 gen_op_movb_R8_T1,
341 gen_op_movb_R9_T1,
342 gen_op_movb_R10_T1,
343 gen_op_movb_R11_T1,
344 gen_op_movb_R12_T1,
345 gen_op_movb_R13_T1,
346 gen_op_movb_R14_T1,
347 gen_op_movb_R15_T1,
348#else
349 gen_op_movh_EAX_T1,
350 gen_op_movh_ECX_T1,
351 gen_op_movh_EDX_T1,
352 gen_op_movh_EBX_T1,
353#endif
354 },
355 [OT_WORD] = {
356 DEF_REGS(gen_op_movw_, _T1)
357 },
358 [OT_LONG] = {
359 DEF_REGS(gen_op_movl_, _T1)
360 },
361#ifdef TARGET_X86_64
362 [OT_QUAD] = {
363 DEF_REGS(gen_op_movq_, _T1)
364 },
365#endif
366};
367
368static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
369 [0] = {
370 DEF_REGS(gen_op_movw_, _A0)
371 },
372 [1] = {
373 DEF_REGS(gen_op_movl_, _A0)
374 },
375#ifdef TARGET_X86_64
376 [2] = {
377 DEF_REGS(gen_op_movq_, _A0)
378 },
379#endif
380};
381
382static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
383{
384 [OT_BYTE] = {
385 {
386 gen_op_movl_T0_EAX,
387 gen_op_movl_T0_ECX,
388 gen_op_movl_T0_EDX,
389 gen_op_movl_T0_EBX,
390#ifdef TARGET_X86_64
391 gen_op_movl_T0_ESP_wrapper,
392 gen_op_movl_T0_EBP_wrapper,
393 gen_op_movl_T0_ESI_wrapper,
394 gen_op_movl_T0_EDI_wrapper,
395 gen_op_movl_T0_R8,
396 gen_op_movl_T0_R9,
397 gen_op_movl_T0_R10,
398 gen_op_movl_T0_R11,
399 gen_op_movl_T0_R12,
400 gen_op_movl_T0_R13,
401 gen_op_movl_T0_R14,
402 gen_op_movl_T0_R15,
403#else
404 gen_op_movh_T0_EAX,
405 gen_op_movh_T0_ECX,
406 gen_op_movh_T0_EDX,
407 gen_op_movh_T0_EBX,
408#endif
409 },
410 {
411 gen_op_movl_T1_EAX,
412 gen_op_movl_T1_ECX,
413 gen_op_movl_T1_EDX,
414 gen_op_movl_T1_EBX,
415#ifdef TARGET_X86_64
416 gen_op_movl_T1_ESP_wrapper,
417 gen_op_movl_T1_EBP_wrapper,
418 gen_op_movl_T1_ESI_wrapper,
419 gen_op_movl_T1_EDI_wrapper,
420 gen_op_movl_T1_R8,
421 gen_op_movl_T1_R9,
422 gen_op_movl_T1_R10,
423 gen_op_movl_T1_R11,
424 gen_op_movl_T1_R12,
425 gen_op_movl_T1_R13,
426 gen_op_movl_T1_R14,
427 gen_op_movl_T1_R15,
428#else
429 gen_op_movh_T1_EAX,
430 gen_op_movh_T1_ECX,
431 gen_op_movh_T1_EDX,
432 gen_op_movh_T1_EBX,
433#endif
434 },
435 },
436 [OT_WORD] = {
437 {
438 DEF_REGS(gen_op_movl_T0_, )
439 },
440 {
441 DEF_REGS(gen_op_movl_T1_, )
442 },
443 },
444 [OT_LONG] = {
445 {
446 DEF_REGS(gen_op_movl_T0_, )
447 },
448 {
449 DEF_REGS(gen_op_movl_T1_, )
450 },
451 },
452#ifdef TARGET_X86_64
453 [OT_QUAD] = {
454 {
455 DEF_REGS(gen_op_movl_T0_, )
456 },
457 {
458 DEF_REGS(gen_op_movl_T1_, )
459 },
460 },
461#endif
462};
463
464static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
465 DEF_REGS(gen_op_movl_A0_, )
466};
467
468static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
469 [0] = {
470 DEF_REGS(gen_op_addl_A0_, )
471 },
472 [1] = {
473 DEF_REGS(gen_op_addl_A0_, _s1)
474 },
475 [2] = {
476 DEF_REGS(gen_op_addl_A0_, _s2)
477 },
478 [3] = {
479 DEF_REGS(gen_op_addl_A0_, _s3)
480 },
481};
482
483#ifdef TARGET_X86_64
484static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
485 DEF_REGS(gen_op_movq_A0_, )
486};
487
488static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
489 [0] = {
490 DEF_REGS(gen_op_addq_A0_, )
491 },
492 [1] = {
493 DEF_REGS(gen_op_addq_A0_, _s1)
494 },
495 [2] = {
496 DEF_REGS(gen_op_addq_A0_, _s2)
497 },
498 [3] = {
499 DEF_REGS(gen_op_addq_A0_, _s3)
500 },
501};
502#endif
503
504static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
505 [0] = {
506 DEF_REGS(gen_op_cmovw_, _T1_T0)
507 },
508 [1] = {
509 DEF_REGS(gen_op_cmovl_, _T1_T0)
510 },
511#ifdef TARGET_X86_64
512 [2] = {
513 DEF_REGS(gen_op_cmovq_, _T1_T0)
514 },
515#endif
516};
517
518static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
519 NULL,
520 gen_op_orl_T0_T1,
521 NULL,
522 NULL,
523 gen_op_andl_T0_T1,
524 NULL,
525 gen_op_xorl_T0_T1,
526 NULL,
527};
528
529#define DEF_ARITHC(SUFFIX)\
530 {\
531 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
532 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
533 },\
534 {\
535 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
536 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
537 },\
538 {\
539 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
540 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
541 },\
542 {\
543 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
544 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
545 },
546
547static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
548 DEF_ARITHC( )
549};
550
551static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
552 DEF_ARITHC(_raw)
553#ifndef CONFIG_USER_ONLY
554 DEF_ARITHC(_kernel)
555 DEF_ARITHC(_user)
556#endif
557};
558
559static const int cc_op_arithb[8] = {
560 CC_OP_ADDB,
561 CC_OP_LOGICB,
562 CC_OP_ADDB,
563 CC_OP_SUBB,
564 CC_OP_LOGICB,
565 CC_OP_SUBB,
566 CC_OP_LOGICB,
567 CC_OP_SUBB,
568};
569
570#define DEF_CMPXCHG(SUFFIX)\
571 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
572 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
573 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
574 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
575
576static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
577 DEF_CMPXCHG( )
578};
579
580static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
581 DEF_CMPXCHG(_raw)
582#ifndef CONFIG_USER_ONLY
583 DEF_CMPXCHG(_kernel)
584 DEF_CMPXCHG(_user)
585#endif
586};
587
588#define DEF_SHIFT(SUFFIX)\
589 {\
590 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
591 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
592 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
593 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
594 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
595 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
596 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
597 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
598 },\
599 {\
600 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
601 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
602 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
603 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
604 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
605 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
606 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
607 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
608 },\
609 {\
610 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
611 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
612 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
613 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
614 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
615 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
616 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
617 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
618 },\
619 {\
620 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
621 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
622 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
623 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
624 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
625 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
626 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
627 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
628 },
629
630static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
631 DEF_SHIFT( )
632};
633
634static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
635 DEF_SHIFT(_raw)
636#ifndef CONFIG_USER_ONLY
637 DEF_SHIFT(_kernel)
638 DEF_SHIFT(_user)
639#endif
640};
641
642#define DEF_SHIFTD(SUFFIX, op)\
643 {\
644 NULL,\
645 NULL,\
646 },\
647 {\
648 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
649 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
650 },\
651 {\
652 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
653 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
654 },\
655 {\
656X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
657 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
658 },
659
660static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
661 DEF_SHIFTD(, im)
662};
663
664static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
665 DEF_SHIFTD(, ECX)
666};
667
668static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
669 DEF_SHIFTD(_raw, im)
670#ifndef CONFIG_USER_ONLY
671 DEF_SHIFTD(_kernel, im)
672 DEF_SHIFTD(_user, im)
673#endif
674};
675
676static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
677 DEF_SHIFTD(_raw, ECX)
678#ifndef CONFIG_USER_ONLY
679 DEF_SHIFTD(_kernel, ECX)
680 DEF_SHIFTD(_user, ECX)
681#endif
682};
683
684static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
685 [0] = {
686 gen_op_btw_T0_T1_cc,
687 gen_op_btsw_T0_T1_cc,
688 gen_op_btrw_T0_T1_cc,
689 gen_op_btcw_T0_T1_cc,
690 },
691 [1] = {
692 gen_op_btl_T0_T1_cc,
693 gen_op_btsl_T0_T1_cc,
694 gen_op_btrl_T0_T1_cc,
695 gen_op_btcl_T0_T1_cc,
696 },
697#ifdef TARGET_X86_64
698 [2] = {
699 gen_op_btq_T0_T1_cc,
700 gen_op_btsq_T0_T1_cc,
701 gen_op_btrq_T0_T1_cc,
702 gen_op_btcq_T0_T1_cc,
703 },
704#endif
705};
706
707static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
708 gen_op_add_bitw_A0_T1,
709 gen_op_add_bitl_A0_T1,
710 X86_64_ONLY(gen_op_add_bitq_A0_T1),
711};
712
713static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
714 [0] = {
715 gen_op_bsfw_T0_cc,
716 gen_op_bsrw_T0_cc,
717 },
718 [1] = {
719 gen_op_bsfl_T0_cc,
720 gen_op_bsrl_T0_cc,
721 },
722#ifdef TARGET_X86_64
723 [2] = {
724 gen_op_bsfq_T0_cc,
725 gen_op_bsrq_T0_cc,
726 },
727#endif
728};
729
730static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
731 gen_op_ldsb_raw_T0_A0,
732 gen_op_ldsw_raw_T0_A0,
733 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
734 NULL,
735#ifndef CONFIG_USER_ONLY
736 gen_op_ldsb_kernel_T0_A0,
737 gen_op_ldsw_kernel_T0_A0,
738 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
739 NULL,
740
741 gen_op_ldsb_user_T0_A0,
742 gen_op_ldsw_user_T0_A0,
743 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
744 NULL,
745#endif
746};
747
748static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
749 gen_op_ldub_raw_T0_A0,
750 gen_op_lduw_raw_T0_A0,
751 NULL,
752 NULL,
753
754#ifndef CONFIG_USER_ONLY
755 gen_op_ldub_kernel_T0_A0,
756 gen_op_lduw_kernel_T0_A0,
757 NULL,
758 NULL,
759
760 gen_op_ldub_user_T0_A0,
761 gen_op_lduw_user_T0_A0,
762 NULL,
763 NULL,
764#endif
765};
766
767/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
768static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
769 gen_op_ldub_raw_T0_A0,
770 gen_op_lduw_raw_T0_A0,
771 gen_op_ldl_raw_T0_A0,
772 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
773
774#ifndef CONFIG_USER_ONLY
775 gen_op_ldub_kernel_T0_A0,
776 gen_op_lduw_kernel_T0_A0,
777 gen_op_ldl_kernel_T0_A0,
778 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
779
780 gen_op_ldub_user_T0_A0,
781 gen_op_lduw_user_T0_A0,
782 gen_op_ldl_user_T0_A0,
783 X86_64_ONLY(gen_op_ldq_user_T0_A0),
784#endif
785};
786
787static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
788 gen_op_ldub_raw_T1_A0,
789 gen_op_lduw_raw_T1_A0,
790 gen_op_ldl_raw_T1_A0,
791 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
792
793#ifndef CONFIG_USER_ONLY
794 gen_op_ldub_kernel_T1_A0,
795 gen_op_lduw_kernel_T1_A0,
796 gen_op_ldl_kernel_T1_A0,
797 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
798
799 gen_op_ldub_user_T1_A0,
800 gen_op_lduw_user_T1_A0,
801 gen_op_ldl_user_T1_A0,
802 X86_64_ONLY(gen_op_ldq_user_T1_A0),
803#endif
804};
805
806static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
807 gen_op_stb_raw_T0_A0,
808 gen_op_stw_raw_T0_A0,
809 gen_op_stl_raw_T0_A0,
810 X86_64_ONLY(gen_op_stq_raw_T0_A0),
811
812#ifndef CONFIG_USER_ONLY
813 gen_op_stb_kernel_T0_A0,
814 gen_op_stw_kernel_T0_A0,
815 gen_op_stl_kernel_T0_A0,
816 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
817
818 gen_op_stb_user_T0_A0,
819 gen_op_stw_user_T0_A0,
820 gen_op_stl_user_T0_A0,
821 X86_64_ONLY(gen_op_stq_user_T0_A0),
822#endif
823};
824
825static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
826 NULL,
827 gen_op_stw_raw_T1_A0,
828 gen_op_stl_raw_T1_A0,
829 X86_64_ONLY(gen_op_stq_raw_T1_A0),
830
831#ifndef CONFIG_USER_ONLY
832 NULL,
833 gen_op_stw_kernel_T1_A0,
834 gen_op_stl_kernel_T1_A0,
835 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
836
837 NULL,
838 gen_op_stw_user_T1_A0,
839 gen_op_stl_user_T1_A0,
840 X86_64_ONLY(gen_op_stq_user_T1_A0),
841#endif
842};
843
844#ifdef VBOX
/* Emit the op that polls VBox for pending external events (so the
   recompiler can leave a translated block to service them).  Fixed to
   use a proper '(void)' prototype instead of an old-style empty
   parameter list. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
849
/* VBOX-only: emit ops storing 'pc' into EIP/RIP, without the
   external-event check that gen_jmp_im performs.  On x86-64, picks the
   smallest immediate form that reproduces the value. */
static inline void gen_update_eip(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value fits as a sign-extended 32 bit immediate */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64 bit value, passed as two 32 bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
864
865#endif /* VBOX */
866
/* Emit ops storing the code pointer 'pc' into EIP/RIP before a jump or
   block exit.  Under VBOX, first emit a check for pending external
   events so the recompiler can bail out of the block. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value fits as a sign-extended 32 bit immediate */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64 bit value, passed as two 32 bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
884
/* Emit ops loading A0 with the effective address of the string source
   operand, seg:[ESI], honouring segment overrides and the current
   address size (s->aflag: 2 = 64 bit, 1 = 32 bit, 0 = 16 bit). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base is only added on explicit override */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;   /* non-zero base segments: must add DS base */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override: mask SI to 16 bits, add segment base */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
919
/* Emit ops loading A0 with the effective address of the string
   destination ES:[EDI].  The destination segment of string ops is
   always ES, so no override handling is needed here. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: no segment base added */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16 bit address: mask DI to 16 bits and always add the ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
940
941static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
942 gen_op_movl_T0_Dshiftb,
943 gen_op_movl_T0_Dshiftw,
944 gen_op_movl_T0_Dshiftl,
945 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
946};
947
948static GenOpFunc1 *gen_op_jnz_ecx[3] = {
949 gen_op_jnz_ecxw,
950 gen_op_jnz_ecxl,
951 X86_64_ONLY(gen_op_jnz_ecxq),
952};
953
954static GenOpFunc1 *gen_op_jz_ecx[3] = {
955 gen_op_jz_ecxw,
956 gen_op_jz_ecxl,
957 X86_64_ONLY(gen_op_jz_ecxq),
958};
959
960static GenOpFunc *gen_op_dec_ECX[3] = {
961 gen_op_decw_ECX,
962 gen_op_decl_ECX,
963 X86_64_ONLY(gen_op_decq_ECX),
964};
965
966static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
967 {
968 gen_op_jnz_subb,
969 gen_op_jnz_subw,
970 gen_op_jnz_subl,
971 X86_64_ONLY(gen_op_jnz_subq),
972 },
973 {
974 gen_op_jz_subb,
975 gen_op_jz_subw,
976 gen_op_jz_subl,
977 X86_64_ONLY(gen_op_jz_subq),
978 },
979};
980
981static GenOpFunc *gen_op_in_DX_T0[3] = {
982 gen_op_inb_DX_T0,
983 gen_op_inw_DX_T0,
984 gen_op_inl_DX_T0,
985};
986
987static GenOpFunc *gen_op_out_DX_T0[3] = {
988 gen_op_outb_DX_T0,
989 gen_op_outw_DX_T0,
990 gen_op_outl_DX_T0,
991};
992
993static GenOpFunc *gen_op_in[3] = {
994 gen_op_inb_T0_T1,
995 gen_op_inw_T0_T1,
996 gen_op_inl_T0_T1,
997};
998
999static GenOpFunc *gen_op_out[3] = {
1000 gen_op_outb_T0_T1,
1001 gen_op_outw_T0_T1,
1002 gen_op_outl_T0_T1,
1003};
1004
1005static GenOpFunc *gen_check_io_T0[3] = {
1006 gen_op_check_iob_T0,
1007 gen_op_check_iow_T0,
1008 gen_op_check_iol_T0,
1009};
1010
1011static GenOpFunc *gen_check_io_DX[3] = {
1012 gen_op_check_iob_DX,
1013 gen_op_check_iow_DX,
1014 gen_op_check_iol_DX,
1015};
1016
1017static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1018{
1019 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1020 if (s->cc_op != CC_OP_DYNAMIC)
1021 gen_op_set_cc_op(s->cc_op);
1022 gen_jmp_im(cur_eip);
1023 if (use_dx)
1024 gen_check_io_DX[ot]();
1025 else
1026 gen_check_io_T0[ot]();
1027 }
1028}
1029
/* Emit ops for one MOVS iteration: load from the source seg:[ESI],
   store to ES:[EDI], then advance both index registers. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();   /* T0 := per-element step (direction flag) */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        /* 16 bit addressing: only the low 16 bits of SI/DI change */
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1051
1052static inline void gen_update_cc_op(DisasContext *s)
1053{
1054 if (s->cc_op != CC_OP_DYNAMIC) {
1055 gen_op_set_cc_op(s->cc_op);
1056 s->cc_op = CC_OP_DYNAMIC;
1057 }
1058}
1059
1060/* XXX: does not work with gdbstub "ice" single step - not a
1061 serious problem */
/* Emit the ECX == 0 exit test used by REP-prefixed string ops.
   Falls through to the loop body when ECX != 0; when ECX == 0 control
   reaches the gen_jmp_tb that continues at next_eip.  Returns label
   l2, which callers can jump to in order to leave the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);  /* ECX != 0: run one iteration */
    gen_set_label(l2);             /* loop-exit target */
    gen_jmp_tb(s, next_eip, 1);    /* ECX == 0: continue after the insn */
    gen_set_label(l1);
    return l2;
}
1074
/* Emit ops for one STOS iteration: store the accumulator (EAX) to
   ES:[EDI], then advance EDI. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();  /* T0 = accumulator */
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();   /* T0 := per-element step (direction flag) */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1092
/* Emit ops for one LODS iteration: load from the source seg:[ESI] into
   the accumulator (EAX), then advance ESI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();   /* T0 := per-element step (direction flag) */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1110
/* Emit ops for one SCAS iteration: compare the accumulator (T0 = EAX)
   with the value at ES:[EDI] (T1), setting the flags via the cmp op,
   then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();  /* T0 = accumulator */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();   /* T0 := per-element step (direction flag) */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1129
/* Emit ops for one CMPS iteration: load the source element (T0) and the
   destination element at ES:[EDI] (T1), compare them to set the flags,
   then advance both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();   /* T0 := per-element step (direction flag) */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1152
/* Emit ops for one INS iteration: read a value from port DX and store
   it to ES:[EDI], then advance EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Store a dummy 0 first: this touches the destination so that any
       memory fault is raised before the port read takes place. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();   /* T0 := per-element step (direction flag) */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1172
/* Emit ops for one OUTS iteration: load the source element from
   seg:[ESI] and write it to port DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();   /* T0 := per-element step (direction flag) */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1190
1191/* same method as Valgrind : we generate jumps to current or next
1192 instruction */
/* Expand to the REP-prefixed wrapper for a string op that does not set
   ZF (MOVS/STOS/LODS/INS/OUTS): exit when ECX == 0, run one iteration,
   decrement ECX and jump back to cur_eip to re-test. */
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2;\
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}
1208
/* Expand to the REPZ/REPNZ wrapper for string ops that set ZF
   (SCAS/CMPS): as GEN_REPZ, but also exit via the ZF test selected by
   'nz' after each comparison. */
#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, \
                                 target_ulong next_eip, \
                                 int nz) \
{ \
    int l2;\
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_dec_ECX[s->aflag](); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt) \
        gen_op_jz_ecx[s->aflag](l2); \
    gen_jmp(s, cur_eip); \
}
1226
1227GEN_REPZ(movs)
1228GEN_REPZ(stos)
1229GEN_REPZ(lods)
1230GEN_REPZ(ins)
1231GEN_REPZ(outs)
1232GEN_REPZ2(scas)
1233GEN_REPZ2(cmps)
1234
1235enum {
1236 JCC_O,
1237 JCC_B,
1238 JCC_Z,
1239 JCC_BE,
1240 JCC_S,
1241 JCC_P,
1242 JCC_L,
1243 JCC_LE,
1244};
1245
1246static GenOpFunc1 *gen_jcc_sub[4][8] = {
1247 [OT_BYTE] = {
1248 NULL,
1249 gen_op_jb_subb,
1250 gen_op_jz_subb,
1251 gen_op_jbe_subb,
1252 gen_op_js_subb,
1253 NULL,
1254 gen_op_jl_subb,
1255 gen_op_jle_subb,
1256 },
1257 [OT_WORD] = {
1258 NULL,
1259 gen_op_jb_subw,
1260 gen_op_jz_subw,
1261 gen_op_jbe_subw,
1262 gen_op_js_subw,
1263 NULL,
1264 gen_op_jl_subw,
1265 gen_op_jle_subw,
1266 },
1267 [OT_LONG] = {
1268 NULL,
1269 gen_op_jb_subl,
1270 gen_op_jz_subl,
1271 gen_op_jbe_subl,
1272 gen_op_js_subl,
1273 NULL,
1274 gen_op_jl_subl,
1275 gen_op_jle_subl,
1276 },
1277#ifdef TARGET_X86_64
1278 [OT_QUAD] = {
1279 NULL,
1280 BUGGY_64(gen_op_jb_subq),
1281 gen_op_jz_subq,
1282 BUGGY_64(gen_op_jbe_subq),
1283 gen_op_js_subq,
1284 NULL,
1285 BUGGY_64(gen_op_jl_subq),
1286 BUGGY_64(gen_op_jle_subq),
1287 },
1288#endif
1289};
1290static GenOpFunc1 *gen_op_loop[3][4] = {
1291 [0] = {
1292 gen_op_loopnzw,
1293 gen_op_loopzw,
1294 gen_op_jnz_ecxw,
1295 },
1296 [1] = {
1297 gen_op_loopnzl,
1298 gen_op_loopzl,
1299 gen_op_jnz_ecxl,
1300 },
1301#ifdef TARGET_X86_64
1302 [2] = {
1303 gen_op_loopnzq,
1304 gen_op_loopzq,
1305 gen_op_jnz_ecxq,
1306 },
1307#endif
1308};
1309
1310static GenOpFunc *gen_setcc_slow[8] = {
1311 gen_op_seto_T0_cc,
1312 gen_op_setb_T0_cc,
1313 gen_op_setz_T0_cc,
1314 gen_op_setbe_T0_cc,
1315 gen_op_sets_T0_cc,
1316 gen_op_setp_T0_cc,
1317 gen_op_setl_T0_cc,
1318 gen_op_setle_T0_cc,
1319};
1320
/* fast setcc handlers for the common cmp/setcc case (flags produced by
   a SUB), indexed by [operand size][jcc_op]. NULL entries (O and P,
   which need the full flags) fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1365
/* FPU arithmetic "ST0 op FT0" handlers, indexed by the 3-bit reg field
   of the FPU opcode (add, mul, com, comp, sub, subr, div, divr) */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1376
/* FPU arithmetic "ST(n) op ST0" handlers, indexed by the 3-bit reg
   field. NOTE the exception in "r" op ordering: for the register form,
   sub/subr and div/divr are swapped relative to the ST0/FT0 table. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1388
/* emit code for a basic ALU operation 'op' (ADD/OR/ADC/SBB/AND/SUB/
   XOR/CMP) of size 'ot' with destination 'd' and source in T1.
   if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume CF, so the previous flags state must be
           materialized first; the combined op also updates the flags */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL; /* CMP writes no destination, cc op done above */
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1448
1449/* if d == OR_TMP0, it means memory operand (address in A0) */
1450static void gen_inc(DisasContext *s1, int ot, int d, int c)
1451{
1452 if (d != OR_TMP0)
1453 gen_op_mov_TN_reg[ot][0][d]();
1454 else
1455 gen_op_ld_T0_A0[ot + s1->mem_index]();
1456 if (s1->cc_op != CC_OP_DYNAMIC)
1457 gen_op_set_cc_op(s1->cc_op);
1458 if (c > 0) {
1459 gen_op_incl_T0();
1460 s1->cc_op = CC_OP_INCB + ot;
1461 } else {
1462 gen_op_decl_T0();
1463 s1->cc_op = CC_OP_DECB + ot;
1464 }
1465 if (d != OR_TMP0)
1466 gen_op_mov_reg_T0[ot][d]();
1467 else
1468 gen_op_st_T0_A0[ot + s1->mem_index]();
1469 gen_op_update_inc_cc();
1470}
1471
1472static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1473{
1474 if (d != OR_TMP0)
1475 gen_op_mov_TN_reg[ot][0][d]();
1476 else
1477 gen_op_ld_T0_A0[ot + s1->mem_index]();
1478 if (s != OR_TMP1)
1479 gen_op_mov_TN_reg[ot][1][s]();
1480 /* for zero counts, flags are not updated, so must do it dynamically */
1481 if (s1->cc_op != CC_OP_DYNAMIC)
1482 gen_op_set_cc_op(s1->cc_op);
1483
1484 if (d != OR_TMP0)
1485 gen_op_shift_T0_T1_cc[ot][op]();
1486 else
1487 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1488 if (d != OR_TMP0)
1489 gen_op_mov_reg_T0[ot][d]();
1490 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1491}
1492
1493static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1494{
1495 /* currently not optimized */
1496 gen_op_movl_T1_im(c);
1497 gen_shift(s1, op, ot, d, OR_TMP1);
1498}
1499
/* decode the modrm/SIB/displacement bytes of a memory operand and emit
   the micro-ops that compute the effective address (including any
   segment base) into A0. Advances s->pc past the consumed bytes.
   *reg_ptr / *offset_ptr are always set to OR_A0 / 0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    /* an explicit segment override prefix always forces the base addition */
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing modes */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 encodes a following SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* displacement only */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment is SS for EBP/ESP-based addresses,
                   DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing modes */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only, no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* fixed base/index register combinations selected by rm */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff(); /* 16-bit effective addresses wrap */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based modes default to the SS segment */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1683
1684static void gen_nop_modrm(DisasContext *s, int modrm)
1685{
1686 int mod, rm, base, code;
1687
1688 mod = (modrm >> 6) & 3;
1689 if (mod == 3)
1690 return;
1691 rm = modrm & 7;
1692
1693 if (s->aflag) {
1694
1695 base = rm;
1696
1697 if (base == 4) {
1698 code = ldub_code(s->pc++);
1699 base = (code & 7);
1700 }
1701
1702 switch (mod) {
1703 case 0:
1704 if (base == 5) {
1705 s->pc += 4;
1706 }
1707 break;
1708 case 1:
1709 s->pc++;
1710 break;
1711 default:
1712 case 2:
1713 s->pc += 4;
1714 break;
1715 }
1716 } else {
1717 switch (mod) {
1718 case 0:
1719 if (rm == 6) {
1720 s->pc += 2;
1721 }
1722 break;
1723 case 1:
1724 s->pc++;
1725 break;
1726 default:
1727 case 2:
1728 s->pc += 2;
1729 break;
1730 }
1731 }
1732}
1733
1734/* used for LEA and MOV AX, mem */
1735static void gen_add_A0_ds_seg(DisasContext *s)
1736{
1737 int override, must_add_seg;
1738 must_add_seg = s->addseg;
1739 override = R_DS;
1740 if (s->override >= 0) {
1741 override = s->override;
1742 must_add_seg = 1;
1743 } else {
1744 override = R_DS;
1745 }
1746 if (must_add_seg) {
1747#ifdef TARGET_X86_64
1748 if (CODE64(s)) {
1749 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1750 } else
1751#endif
1752 {
1753 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1754 }
1755 }
1756}
1757
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0. For mod == 3 the operand is a register and a simple move is
   emitted; otherwise the effective address is computed and a memory
   access of size 'ot' is generated. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register-to-register move through T0 */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        /* memory operand: compute the address into A0 first */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1789
1790static inline uint32_t insn_get(DisasContext *s, int ot)
1791{
1792 uint32_t ret;
1793
1794 switch(ot) {
1795 case OT_BYTE:
1796 ret = ldub_code(s->pc);
1797 s->pc++;
1798 break;
1799 case OT_WORD:
1800 ret = lduw_code(s->pc);
1801 s->pc += 2;
1802 break;
1803 default:
1804 case OT_LONG:
1805 ret = ldl_code(s->pc);
1806 s->pc += 4;
1807 break;
1808 }
1809 return ret;
1810}
1811
1812static inline int insn_const_size(unsigned int ot)
1813{
1814 if (ot <= OT_LONG)
1815 return 1 << ot;
1816 else
1817 return 4;
1818}
1819
/* emit a jump to 'eip'. If the target lies in the same guest page as
   the current TB (direct TB chaining is only safe within a page), emit
   a patchable direct jump; otherwise fall back to a generic end of
   block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) so the caller can patch the chain */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1844
/* emit a conditional jump 'b' (Jcc encoding: bit 0 inverts the
   condition, bits 1-3 select it) to 'val', falling through to
   'next_eip'. When TB chaining is allowed (jmp_opt) a fast flag test
   specialized for the pending cc_op is used where possible; otherwise
   the condition is evaluated slowly and a generic end of block is
   emitted. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only Z and S can be read directly from the
               stored result; '% 4' extracts the operand size so the
               SUB-specialized testers can be reused */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast tester: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* an inverted condition just swaps the two branch targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: no TB chaining, always end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1976
/* emit code computing condition 'b' (SETcc encoding: bit 0 inverts,
   bits 1-3 select the condition) into T0 as 0 or 1, using a fast
   specialized tester for the pending cc_op when possible */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read directly from the stored result;
           '% 4' extracts the operand size */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic fallback: materialize flags and use the slow tester */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1(); /* invert the computed 0/1 result */
    }
}
2043
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so flush flags and eip */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple selector load, no fault possible */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2066
2067static inline void gen_stack_update(DisasContext *s, int addend)
2068{
2069#ifdef TARGET_X86_64
2070 if (CODE64(s)) {
2071 if (addend == 8)
2072 gen_op_addq_ESP_8();
2073 else
2074 gen_op_addq_ESP_im(addend);
2075 } else
2076#endif
2077 if (s->ss32) {
2078 if (addend == 2)
2079 gen_op_addl_ESP_2();
2080 else if (addend == 4)
2081 gen_op_addl_ESP_4();
2082 else
2083 gen_op_addl_ESP_im(addend);
2084 } else {
2085 if (addend == 2)
2086 gen_op_addw_ESP_2();
2087 else if (addend == 4)
2088 gen_op_addw_ESP_4();
2089 else
2090 gen_op_addw_ESP_im(addend);
2091 }
2092}
2093
/* generate a push of T0. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64-bit mode: 8-byte push (default) or 2-byte with 0x66 prefix */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* save the unsegmented ESP value in T1 for the final
                   stack pointer update */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff(); /* 16-bit SP wraps */
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* the memory write must precede the ESP update so that a fault
           leaves ESP unchanged (precise exceptions) */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2133
2134/* generate a push. It depends on ss32, addseg and dflag */
2135/* slower version for T1, only used for call Ev */
2136static void gen_push_T1(DisasContext *s)
2137{
2138#ifdef TARGET_X86_64
2139 if (CODE64(s)) {
2140 gen_op_movq_A0_reg[R_ESP]();
2141 if (s->dflag) {
2142 gen_op_subq_A0_8();
2143 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2144 } else {
2145 gen_op_subq_A0_2();
2146 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2147 }
2148 gen_op_movq_ESP_A0();
2149 } else
2150#endif
2151 {
2152 gen_op_movl_A0_reg[R_ESP]();
2153 if (!s->dflag)
2154 gen_op_subl_A0_2();
2155 else
2156 gen_op_subl_A0_4();
2157 if (s->ss32) {
2158 if (s->addseg) {
2159 gen_op_addl_A0_SS();
2160 }
2161 } else {
2162 gen_op_andl_A0_ffff();
2163 gen_op_addl_A0_SS();
2164 }
2165 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2166
2167 if (s->ss32 && !s->addseg)
2168 gen_op_movl_ESP_A0();
2169 else
2170 gen_stack_update(s, (-2) << s->dflag);
2171 }
2172}
2173
/* two step pop is necessary for precise exceptions: this loads the top
   of stack into T0 without touching ESP; gen_pop_update performs the
   pointer adjustment afterwards */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff(); /* 16-bit SP wraps */
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2195
2196static void gen_pop_update(DisasContext *s)
2197{
2198#ifdef TARGET_X86_64
2199 if (CODE64(s) && s->dflag) {
2200 gen_stack_update(s, 8);
2201 } else
2202#endif
2203 {
2204 gen_stack_update(s, 2 << s->dflag);
2205 }
2206}
2207
/* compute the current stack top address into A0 (with SS base if
   addseg) and keep the raw offset in T1 for later pointer updates */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff(); /* 16-bit SP wraps */
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2217
/* emit PUSHA: store the 8 general registers (EAX..EDI order, i.e.
   index 7-i) below the stack top, then update ESP.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag); /* room for 8 registers */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0(); /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2236
/* emit POPA: reload the 8 general registers from the stack (skipping
   ESP itself), then update ESP.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag); /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2258
/* emit the ENTER instruction: push EBP, optionally copy 'level' frame
   pointers, set EBP to the new frame and reserve 'esp_addend' bytes */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f; /* architectural: nesting level is taken modulo 32 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0(); /* new frame pointer */

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* copy the enclosing frame pointers (helper op) */
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0(); /* new frame pointer */
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* copy the enclosing frame pointers (helper op) */
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2306
2307static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2308{
2309 if (s->cc_op != CC_OP_DYNAMIC)
2310 gen_op_set_cc_op(s->cc_op);
2311 gen_jmp_im(cur_eip);
2312 gen_op_raise_exception(trapno);
2313 s->is_jmp = 3;
2314}
2315
/* an interrupt is different from an exception because of the
   privilege checks */
2318static void gen_interrupt(DisasContext *s, int intno,
2319 target_ulong cur_eip, target_ulong next_eip)
2320{
2321 if (s->cc_op != CC_OP_DYNAMIC)
2322 gen_op_set_cc_op(s->cc_op);
2323 gen_jmp_im(cur_eip);
2324 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
2325 s->is_jmp = 3;
2326}
2327
2328static void gen_debug(DisasContext *s, target_ulong cur_eip)
2329{
2330 if (s->cc_op != CC_OP_DYNAMIC)
2331 gen_op_set_cc_op(s->cc_op);
2332 gen_jmp_im(cur_eip);
2333 gen_op_debug();
2334 s->is_jmp = 3;
2335}
2336
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    /* materialize lazily-evaluated condition codes */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    /* clear the one-instruction interrupt inhibition (MOV SS / STI) */
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* EFLAGS.TF set: raise a trace exception after the instruction */
        gen_op_single_step();
    } else {
        /* return 0: no chained TB to jump to */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2356
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        /* TB chaining allowed: flush flags and emit a (possibly direct)
           goto_tb */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* no chaining: set eip and end the block generically */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2376
/* convenience wrapper: jump to eip using TB slot 0 */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2381
2382static void gen_movtl_T0_im(target_ulong val)
2383{
2384#ifdef TARGET_X86_64
2385 if ((int32_t)val == val) {
2386 gen_op_movl_T0_im(val);
2387 } else {
2388 gen_op_movq_T0_im64(val >> 32, val);
2389 }
2390#else
2391 gen_op_movl_T0_im(val);
2392#endif
2393}
2394
2395static void gen_movtl_T1_im(target_ulong val)
2396{
2397#ifdef TARGET_X86_64
2398 if ((int32_t)val == val) {
2399 gen_op_movl_T1_im(val);
2400 } else {
2401 gen_op_movq_T1_im64(val >> 32, val);
2402 }
2403#else
2404 gen_op_movl_T1_im(val);
2405#endif
2406}
2407
/* add immediate 'val' to the address register A0, using the 64-bit op
   in long mode and the 32-bit op otherwise */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2417
/* 64-bit guest load (address in A0) into a CPU env field given by its
   offset; indexed by mem_index (raw / kernel / user access variants) */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2425
/* 64-bit guest store (address in A0) from a CPU env field given by its
   offset; indexed by mem_index (raw / kernel / user access variants) */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2433
/* 128-bit (octaword/XMM) guest load (address in A0) into a CPU env
   field; indexed by mem_index (raw / kernel / user access variants) */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2441
/* 128-bit (octaword/XMM) guest store (address in A0) from a CPU env
   field; indexed by mem_index (raw / kernel / user access variants) */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2449
/* marker for table entries that need special, non-table-driven decoding */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* entry pair: MMX (64-bit) handler and SSE (128-bit XMM) handler */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* entry quadruple of FP handlers: packed single, packed double,
   scalar single, scalar double */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2455
/* main MMX/SSE dispatch table, indexed by [0x0F-prefixed opcode byte]
   [prefix selector b1: 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2].
   NULL entries are invalid; SSE_SPECIAL entries are decoded by hand. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* actually cmp<imm8>; see sse_op_table4 */
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2578
/* immediate-count MMX/SSE shift handlers for opcodes 0x71-0x73,
   indexed by [(opcode - 0x71) * 8 + modrm reg field][mmx/xmm] */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm }, /* byte shift, SSE2 only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm }, /* byte shift, SSE2 only */
};
2591
/* scalar int<->float conversion handlers in groups of 4
   (ss, sd, and their 64-bit integer variants, x86-64 only):
   rows are cvtsi2*, cvtt*2si (truncating), cvt*2si */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2608
/* SSE compare micro-ops for cmpps/cmppd/cmpss/cmpsd (0x0f 0xc2),
   indexed by the imm8 predicate (0-7); the second dimension (via
   SSE_FOP) selects the ps/pd/ss/sd variant by prefix.  imm8 >= 8 is
   rejected by the caller as an illegal opcode. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2619
/* Translate one MMX/SSE/SSE2/SSE3 instruction whose opcode byte (after
   the 0x0f escape) is b.  pc_start is the guest address of the
   instruction start, used when raising #NM/#UD; rex_r is the REX.R
   extension for the modrm reg field (0 or 8).  Table-driven: most
   opcodes emit a single two-operand micro-op from sse_op_table1;
   SSE_SPECIAL entries (moves, shifts-by-imm, conversions, extracts)
   are decoded case by case below. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* b1 selects the prefix column of sse_op_table1:
       0 = none (MMX form), 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* opcodes 0x10..0x5f plus cmpps/shufps always operate on XMM state;
       elsewhere the prefix decides MMX vs XMM. */
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise device-not-available (#NM) */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE requires CR4.OSFXSR; MMX does not */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* fold the prefix into bits 8-9 so one switch covers all forms */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory source: load low dword, zero the upper three */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory source: load low qword, zero the high qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* high qword of the destination is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* store form has no register encoding */
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* store form has no register encoding */
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* shift count is an imm8 materialized in xmm_t0/mmx_t0 */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* sd variants load a qword into the scratch reg */
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* trailing imm8: rip-relative addressing must account for it */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            /* table entry is really a three-operand op (dest, src, imm8) */
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        /* comiss/ucomiss and comisd/ucomisd write EFLAGS directly */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3217
3218#ifdef VBOX
3219/* Checks if it's an invalid lock sequence. Only a few instructions
3220 can be used together with the lock prefix and of those only the
3221 form that write a memory operand. So, this is kind of annoying
3222 work to do...
3223 The AMD manual lists the following instructions.
3224 ADC
3225 ADD
3226 AND
3227 BTC
3228 BTR
3229 BTS
3230 CMPXCHG
3231 CMPXCHG8B
3232 CMPXCHG16B
3233 DEC
3234 INC
3235 NEG
3236 NOT
3237 OR
3238 SBB
3239 SUB
3240 XADD
3241 XCHG
3242 XOR */
3243static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
3244{
3245 target_ulong pc = s->pc;
3246 int modrm, mod, op;
3247
3248 /* X={8,16,32,64} Y={16,32,64} */
3249 switch (b)
3250 {
3251 /* /2: ADC reg/memX, immX */
3252 /* /0: ADD reg/memX, immX */
3253 /* /4: AND reg/memX, immX */
3254 /* /1: OR reg/memX, immX */
3255 /* /3: SBB reg/memX, immX */
3256 /* /5: SUB reg/memX, immX */
3257 /* /6: XOR reg/memX, immX */
3258 case 0x80:
3259 case 0x81:
3260 case 0x83:
3261 modrm = ldub_code(pc++);
3262 op = (modrm >> 3) & 7;
3263 if (op == 7) /* /7: CMP */
3264 break;
3265 mod = (modrm >> 6) & 3;
3266 if (mod == 3) /* register destination */
3267 break;
3268 return false;
3269
3270 case 0x10: /* /r: ADC reg/mem8, reg8 */
3271 case 0x11: /* /r: ADC reg/memX, regY */
3272 case 0x00: /* /r: ADD reg/mem8, reg8 */
3273 case 0x01: /* /r: ADD reg/memX, regY */
3274 case 0x20: /* /r: AND reg/mem8, reg8 */
3275 case 0x21: /* /r: AND reg/memY, regY */
3276 case 0x08: /* /r: OR reg/mem8, reg8 */
3277 case 0x09: /* /r: OR reg/memY, regY */
3278 case 0x18: /* /r: SBB reg/mem8, reg8 */
3279 case 0x19: /* /r: SBB reg/memY, regY */
3280 case 0x28: /* /r: SUB reg/mem8, reg8 */
3281 case 0x29: /* /r: SUB reg/memY, regY */
3282 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
3283 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
3284 case 0x30: /* /r: XOR reg/mem8, reg8 */
3285 case 0x31: /* /r: XOR reg/memY, regY */
3286 modrm = ldub_code(pc++);
3287 mod = (modrm >> 6) & 3;
3288 if (mod == 3) /* register destination */
3289 break;
3290 return false;
3291
3292 /* /1: DEC reg/memX */
3293 /* /0: INC reg/memX */
3294 case 0xfe:
3295 case 0xff:
3296 modrm = ldub_code(pc++);
3297 mod = (modrm >> 6) & 3;
3298 if (mod == 3) /* register destination */
3299 break;
3300 return false;
3301
3302 /* /3: NEG reg/memX */
3303 /* /2: NOT reg/memX */
3304 case 0xf6:
3305 case 0xf7:
3306 modrm = ldub_code(pc++);
3307 mod = (modrm >> 6) & 3;
3308 if (mod == 3) /* register destination */
3309 break;
3310 return false;
3311
3312 case 0x0f:
3313 b = ldub_code(pc++);
3314 switch (b)
3315 {
3316 /* /7: BTC reg/memY, imm8 */
3317 /* /6: BTR reg/memY, imm8 */
3318 /* /5: BTS reg/memY, imm8 */
3319 case 0xba:
3320 modrm = ldub_code(pc++);
3321 op = (modrm >> 3) & 7;
3322 if (op < 5)
3323 break;
3324 mod = (modrm >> 6) & 3;
3325 if (mod == 3) /* register destination */
3326 break;
3327 return false;
3328
3329 case 0xbb: /* /r: BTC reg/memY, regY */
3330 case 0xb3: /* /r: BTR reg/memY, regY */
3331 case 0xab: /* /r: BTS reg/memY, regY */
3332 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
3333 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
3334 case 0xc0: /* /r: XADD reg/mem8, reg8 */
3335 case 0xc1: /* /r: XADD reg/memY, regY */
3336 modrm = ldub_code(pc++);
3337 mod = (modrm >> 6) & 3;
3338 if (mod == 3) /* register destination */
3339 break;
3340 return false;
3341
3342 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
3343 case 0xc7:
3344 modrm = ldub_code(pc++);
3345 op = (modrm >> 3) & 7;
3346 if (op != 1)
3347 break;
3348 return false;
3349 }
3350 break;
3351 }
3352
3353 /* illegal sequence. The s->pc is past the lock prefix and that
3354 is sufficient for the TB, I think. */
3355 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
3356 return true;
3357}
3358#endif /* VBOX */
3359
3360
3361/* convert one instruction. s->is_jmp is set if the translation must
3362 be stopped. Return the next pc value */
3363static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3364{
3365 int b, prefixes, aflag, dflag;
3366 int shift, ot;
3367 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3368 target_ulong next_eip, tval;
3369 int rex_w, rex_r;
3370
3371 s->pc = pc_start;
3372 prefixes = 0;
3373 aflag = s->code32;
3374 dflag = s->code32;
3375 s->override = -1;
3376 rex_w = -1;
3377 rex_r = 0;
3378#ifdef TARGET_X86_64
3379 s->rex_x = 0;
3380 s->rex_b = 0;
3381 x86_64_hregs = 0;
3382#endif
3383 s->rip_offset = 0; /* for relative ip address */
3384
3385#ifdef VBOX
3386 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
3387 gen_update_eip(pc_start - s->cs_base);
3388#endif
3389
3390 next_byte:
3391 b = ldub_code(s->pc);
3392 s->pc++;
3393 /* check prefixes */
3394#ifdef TARGET_X86_64
3395 if (CODE64(s)) {
3396 switch (b) {
3397 case 0xf3:
3398 prefixes |= PREFIX_REPZ;
3399 goto next_byte;
3400 case 0xf2:
3401 prefixes |= PREFIX_REPNZ;
3402 goto next_byte;
3403 case 0xf0:
3404 prefixes |= PREFIX_LOCK;
3405 goto next_byte;
3406 case 0x2e:
3407 s->override = R_CS;
3408 goto next_byte;
3409 case 0x36:
3410 s->override = R_SS;
3411 goto next_byte;
3412 case 0x3e:
3413 s->override = R_DS;
3414 goto next_byte;
3415 case 0x26:
3416 s->override = R_ES;
3417 goto next_byte;
3418 case 0x64:
3419 s->override = R_FS;
3420 goto next_byte;
3421 case 0x65:
3422 s->override = R_GS;
3423 goto next_byte;
3424 case 0x66:
3425 prefixes |= PREFIX_DATA;
3426 goto next_byte;
3427 case 0x67:
3428 prefixes |= PREFIX_ADR;
3429 goto next_byte;
3430 case 0x40 ... 0x4f:
3431 /* REX prefix */
3432 rex_w = (b >> 3) & 1;
3433 rex_r = (b & 0x4) << 1;
3434 s->rex_x = (b & 0x2) << 2;
3435 REX_B(s) = (b & 0x1) << 3;
3436 x86_64_hregs = 1; /* select uniform byte register addressing */
3437 goto next_byte;
3438 }
3439 if (rex_w == 1) {
3440 /* 0x66 is ignored if rex.w is set */
3441 dflag = 2;
3442 } else {
3443 if (prefixes & PREFIX_DATA)
3444 dflag ^= 1;
3445 }
3446 if (!(prefixes & PREFIX_ADR))
3447 aflag = 2;
3448 } else
3449#endif
3450 {
3451 switch (b) {
3452 case 0xf3:
3453 prefixes |= PREFIX_REPZ;
3454 goto next_byte;
3455 case 0xf2:
3456 prefixes |= PREFIX_REPNZ;
3457 goto next_byte;
3458 case 0xf0:
3459 prefixes |= PREFIX_LOCK;
3460 goto next_byte;
3461 case 0x2e:
3462 s->override = R_CS;
3463 goto next_byte;
3464 case 0x36:
3465 s->override = R_SS;
3466 goto next_byte;
3467 case 0x3e:
3468 s->override = R_DS;
3469 goto next_byte;
3470 case 0x26:
3471 s->override = R_ES;
3472 goto next_byte;
3473 case 0x64:
3474 s->override = R_FS;
3475 goto next_byte;
3476 case 0x65:
3477 s->override = R_GS;
3478 goto next_byte;
3479 case 0x66:
3480 prefixes |= PREFIX_DATA;
3481 goto next_byte;
3482 case 0x67:
3483 prefixes |= PREFIX_ADR;
3484 goto next_byte;
3485 }
3486 if (prefixes & PREFIX_DATA)
3487 dflag ^= 1;
3488 if (prefixes & PREFIX_ADR)
3489 aflag ^= 1;
3490 }
3491
3492 s->prefix = prefixes;
3493 s->aflag = aflag;
3494 s->dflag = dflag;
3495
3496 /* lock generation */
3497#ifndef VBOX
3498 if (prefixes & PREFIX_LOCK)
3499 gen_op_lock();
3500#else /* VBOX */
3501 if (prefixes & PREFIX_LOCK) {
3502 if (is_invalid_lock_sequence(s, pc_start, b)) {
3503 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3504 return s->pc;
3505 }
3506 gen_op_lock();
3507 }
3508#endif /* VBOX */
3509
3510 /* now check op code */
3511 reswitch:
3512 switch(b) {
3513 case 0x0f:
3514 /**************************/
3515 /* extended op code */
3516 b = ldub_code(s->pc++) | 0x100;
3517 goto reswitch;
3518
3519 /**************************/
3520 /* arith & logic */
3521 case 0x00 ... 0x05:
3522 case 0x08 ... 0x0d:
3523 case 0x10 ... 0x15:
3524 case 0x18 ... 0x1d:
3525 case 0x20 ... 0x25:
3526 case 0x28 ... 0x2d:
3527 case 0x30 ... 0x35:
3528 case 0x38 ... 0x3d:
3529 {
3530 int op, f, val;
3531 op = (b >> 3) & 7;
3532 f = (b >> 1) & 3;
3533
3534 if ((b & 1) == 0)
3535 ot = OT_BYTE;
3536 else
3537 ot = dflag + OT_WORD;
3538
3539 switch(f) {
3540 case 0: /* OP Ev, Gv */
3541 modrm = ldub_code(s->pc++);
3542 reg = ((modrm >> 3) & 7) | rex_r;
3543 mod = (modrm >> 6) & 3;
3544 rm = (modrm & 7) | REX_B(s);
3545 if (mod != 3) {
3546 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3547 opreg = OR_TMP0;
3548 } else if (op == OP_XORL && rm == reg) {
3549 xor_zero:
3550 /* xor reg, reg optimisation */
3551 gen_op_movl_T0_0();
3552 s->cc_op = CC_OP_LOGICB + ot;
3553 gen_op_mov_reg_T0[ot][reg]();
3554 gen_op_update1_cc();
3555 break;
3556 } else {
3557 opreg = rm;
3558 }
3559 gen_op_mov_TN_reg[ot][1][reg]();
3560 gen_op(s, op, ot, opreg);
3561 break;
3562 case 1: /* OP Gv, Ev */
3563 modrm = ldub_code(s->pc++);
3564 mod = (modrm >> 6) & 3;
3565 reg = ((modrm >> 3) & 7) | rex_r;
3566 rm = (modrm & 7) | REX_B(s);
3567 if (mod != 3) {
3568 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3569 gen_op_ld_T1_A0[ot + s->mem_index]();
3570 } else if (op == OP_XORL && rm == reg) {
3571 goto xor_zero;
3572 } else {
3573 gen_op_mov_TN_reg[ot][1][rm]();
3574 }
3575 gen_op(s, op, ot, reg);
3576 break;
3577 case 2: /* OP A, Iv */
3578 val = insn_get(s, ot);
3579 gen_op_movl_T1_im(val);
3580 gen_op(s, op, ot, OR_EAX);
3581 break;
3582 }
3583 }
3584 break;
3585
3586 case 0x80: /* GRP1 */
3587 case 0x81:
3588 case 0x82:
3589 case 0x83:
3590 {
3591 int val;
3592
3593 if ((b & 1) == 0)
3594 ot = OT_BYTE;
3595 else
3596 ot = dflag + OT_WORD;
3597
3598 modrm = ldub_code(s->pc++);
3599 mod = (modrm >> 6) & 3;
3600 rm = (modrm & 7) | REX_B(s);
3601 op = (modrm >> 3) & 7;
3602
3603 if (mod != 3) {
3604 if (b == 0x83)
3605 s->rip_offset = 1;
3606 else
3607 s->rip_offset = insn_const_size(ot);
3608 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3609 opreg = OR_TMP0;
3610 } else {
3611 opreg = rm;
3612 }
3613
3614 switch(b) {
3615 default:
3616 case 0x80:
3617 case 0x81:
3618 case 0x82:
3619 val = insn_get(s, ot);
3620 break;
3621 case 0x83:
3622 val = (int8_t)insn_get(s, OT_BYTE);
3623 break;
3624 }
3625 gen_op_movl_T1_im(val);
3626 gen_op(s, op, ot, opreg);
3627 }
3628 break;
3629
3630 /**************************/
3631 /* inc, dec, and other misc arith */
3632 case 0x40 ... 0x47: /* inc Gv */
3633 ot = dflag ? OT_LONG : OT_WORD;
3634 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3635 break;
3636 case 0x48 ... 0x4f: /* dec Gv */
3637 ot = dflag ? OT_LONG : OT_WORD;
3638 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3639 break;
3640 case 0xf6: /* GRP3 */
3641 case 0xf7:
3642 if ((b & 1) == 0)
3643 ot = OT_BYTE;
3644 else
3645 ot = dflag + OT_WORD;
3646
3647 modrm = ldub_code(s->pc++);
3648 mod = (modrm >> 6) & 3;
3649 rm = (modrm & 7) | REX_B(s);
3650 op = (modrm >> 3) & 7;
3651 if (mod != 3) {
3652 if (op == 0)
3653 s->rip_offset = insn_const_size(ot);
3654 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3655 gen_op_ld_T0_A0[ot + s->mem_index]();
3656 } else {
3657 gen_op_mov_TN_reg[ot][0][rm]();
3658 }
3659
3660 switch(op) {
3661 case 0: /* test */
3662 val = insn_get(s, ot);
3663 gen_op_movl_T1_im(val);
3664 gen_op_testl_T0_T1_cc();
3665 s->cc_op = CC_OP_LOGICB + ot;
3666 break;
3667 case 2: /* not */
3668 gen_op_notl_T0();
3669 if (mod != 3) {
3670 gen_op_st_T0_A0[ot + s->mem_index]();
3671 } else {
3672 gen_op_mov_reg_T0[ot][rm]();
3673 }
3674 break;
3675 case 3: /* neg */
3676 gen_op_negl_T0();
3677 if (mod != 3) {
3678 gen_op_st_T0_A0[ot + s->mem_index]();
3679 } else {
3680 gen_op_mov_reg_T0[ot][rm]();
3681 }
3682 gen_op_update_neg_cc();
3683 s->cc_op = CC_OP_SUBB + ot;
3684 break;
3685 case 4: /* mul */
3686 switch(ot) {
3687 case OT_BYTE:
3688 gen_op_mulb_AL_T0();
3689 s->cc_op = CC_OP_MULB;
3690 break;
3691 case OT_WORD:
3692 gen_op_mulw_AX_T0();
3693 s->cc_op = CC_OP_MULW;
3694 break;
3695 default:
3696 case OT_LONG:
3697 gen_op_mull_EAX_T0();
3698 s->cc_op = CC_OP_MULL;
3699 break;
3700#ifdef TARGET_X86_64
3701 case OT_QUAD:
3702 gen_op_mulq_EAX_T0();
3703 s->cc_op = CC_OP_MULQ;
3704 break;
3705#endif
3706 }
3707 break;
3708 case 5: /* imul */
3709 switch(ot) {
3710 case OT_BYTE:
3711 gen_op_imulb_AL_T0();
3712 s->cc_op = CC_OP_MULB;
3713 break;
3714 case OT_WORD:
3715 gen_op_imulw_AX_T0();
3716 s->cc_op = CC_OP_MULW;
3717 break;
3718 default:
3719 case OT_LONG:
3720 gen_op_imull_EAX_T0();
3721 s->cc_op = CC_OP_MULL;
3722 break;
3723#ifdef TARGET_X86_64
3724 case OT_QUAD:
3725 gen_op_imulq_EAX_T0();
3726 s->cc_op = CC_OP_MULQ;
3727 break;
3728#endif
3729 }
3730 break;
3731 case 6: /* div */
3732 switch(ot) {
3733 case OT_BYTE:
3734 gen_jmp_im(pc_start - s->cs_base);
3735 gen_op_divb_AL_T0();
3736 break;
3737 case OT_WORD:
3738 gen_jmp_im(pc_start - s->cs_base);
3739 gen_op_divw_AX_T0();
3740 break;
3741 default:
3742 case OT_LONG:
3743 gen_jmp_im(pc_start - s->cs_base);
3744 gen_op_divl_EAX_T0();
3745 break;
3746#ifdef TARGET_X86_64
3747 case OT_QUAD:
3748 gen_jmp_im(pc_start - s->cs_base);
3749 gen_op_divq_EAX_T0();
3750 break;
3751#endif
3752 }
3753 break;
3754 case 7: /* idiv */
3755 switch(ot) {
3756 case OT_BYTE:
3757 gen_jmp_im(pc_start - s->cs_base);
3758 gen_op_idivb_AL_T0();
3759 break;
3760 case OT_WORD:
3761 gen_jmp_im(pc_start - s->cs_base);
3762 gen_op_idivw_AX_T0();
3763 break;
3764 default:
3765 case OT_LONG:
3766 gen_jmp_im(pc_start - s->cs_base);
3767 gen_op_idivl_EAX_T0();
3768 break;
3769#ifdef TARGET_X86_64
3770 case OT_QUAD:
3771 gen_jmp_im(pc_start - s->cs_base);
3772 gen_op_idivq_EAX_T0();
3773 break;
3774#endif
3775 }
3776 break;
3777 default:
3778 goto illegal_op;
3779 }
3780 break;
3781
3782 case 0xfe: /* GRP4 */
3783 case 0xff: /* GRP5 */
3784 if ((b & 1) == 0)
3785 ot = OT_BYTE;
3786 else
3787 ot = dflag + OT_WORD;
3788
3789 modrm = ldub_code(s->pc++);
3790 mod = (modrm >> 6) & 3;
3791 rm = (modrm & 7) | REX_B(s);
3792 op = (modrm >> 3) & 7;
3793 if (op >= 2 && b == 0xfe) {
3794 goto illegal_op;
3795 }
3796 if (CODE64(s)) {
3797 if (op == 2 || op == 4) {
3798 /* operand size for jumps is 64 bit */
3799 ot = OT_QUAD;
3800 } else if (op == 3 || op == 5) {
 3801 /* for far calls and jumps, the operand is 16 or 32 bit, even
 3802 in long mode */
3803 ot = dflag ? OT_LONG : OT_WORD;
3804 } else if (op == 6) {
3805 /* default push size is 64 bit */
3806 ot = dflag ? OT_QUAD : OT_WORD;
3807 }
3808 }
3809 if (mod != 3) {
3810 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3811 if (op >= 2 && op != 3 && op != 5)
3812 gen_op_ld_T0_A0[ot + s->mem_index]();
3813 } else {
3814 gen_op_mov_TN_reg[ot][0][rm]();
3815 }
3816
3817 switch(op) {
3818 case 0: /* inc Ev */
3819 if (mod != 3)
3820 opreg = OR_TMP0;
3821 else
3822 opreg = rm;
3823 gen_inc(s, ot, opreg, 1);
3824 break;
3825 case 1: /* dec Ev */
3826 if (mod != 3)
3827 opreg = OR_TMP0;
3828 else
3829 opreg = rm;
3830 gen_inc(s, ot, opreg, -1);
3831 break;
3832 case 2: /* call Ev */
3833 /* XXX: optimize if memory (no 'and' is necessary) */
3834#ifdef VBOX_WITH_CALL_RECORD
3835 if (s->record_call)
3836 gen_op_record_call();
3837#endif
3838 if (s->dflag == 0)
3839 gen_op_andl_T0_ffff();
3840 next_eip = s->pc - s->cs_base;
3841 gen_movtl_T1_im(next_eip);
3842 gen_push_T1(s);
3843 gen_op_jmp_T0();
3844 gen_eob(s);
3845 break;
3846 case 3: /* lcall Ev */
3847 gen_op_ld_T1_A0[ot + s->mem_index]();
3848 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3849 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3850 do_lcall:
3851 if (s->pe && !s->vm86) {
3852 if (s->cc_op != CC_OP_DYNAMIC)
3853 gen_op_set_cc_op(s->cc_op);
3854 gen_jmp_im(pc_start - s->cs_base);
3855 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3856 } else {
3857 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3858 }
3859 gen_eob(s);
3860 break;
3861 case 4: /* jmp Ev */
3862 if (s->dflag == 0)
3863 gen_op_andl_T0_ffff();
3864 gen_op_jmp_T0();
3865 gen_eob(s);
3866 break;
3867 case 5: /* ljmp Ev */
3868 gen_op_ld_T1_A0[ot + s->mem_index]();
3869 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3870 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3871 do_ljmp:
3872 if (s->pe && !s->vm86) {
3873 if (s->cc_op != CC_OP_DYNAMIC)
3874 gen_op_set_cc_op(s->cc_op);
3875 gen_jmp_im(pc_start - s->cs_base);
3876 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3877 } else {
3878 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3879 gen_op_movl_T0_T1();
3880 gen_op_jmp_T0();
3881 }
3882 gen_eob(s);
3883 break;
3884 case 6: /* push Ev */
3885 gen_push_T0(s);
3886 break;
3887 default:
3888 goto illegal_op;
3889 }
3890 break;
3891
3892 case 0x84: /* test Ev, Gv */
3893 case 0x85:
3894 if ((b & 1) == 0)
3895 ot = OT_BYTE;
3896 else
3897 ot = dflag + OT_WORD;
3898
3899 modrm = ldub_code(s->pc++);
3900 mod = (modrm >> 6) & 3;
3901 rm = (modrm & 7) | REX_B(s);
3902 reg = ((modrm >> 3) & 7) | rex_r;
3903
3904 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3905 gen_op_mov_TN_reg[ot][1][reg]();
3906 gen_op_testl_T0_T1_cc();
3907 s->cc_op = CC_OP_LOGICB + ot;
3908 break;
3909
3910 case 0xa8: /* test eAX, Iv */
3911 case 0xa9:
3912 if ((b & 1) == 0)
3913 ot = OT_BYTE;
3914 else
3915 ot = dflag + OT_WORD;
3916 val = insn_get(s, ot);
3917
3918 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3919 gen_op_movl_T1_im(val);
3920 gen_op_testl_T0_T1_cc();
3921 s->cc_op = CC_OP_LOGICB + ot;
3922 break;
3923
3924 case 0x98: /* CWDE/CBW */
3925#ifdef TARGET_X86_64
3926 if (dflag == 2) {
3927 gen_op_movslq_RAX_EAX();
3928 } else
3929#endif
3930 if (dflag == 1)
3931 gen_op_movswl_EAX_AX();
3932 else
3933 gen_op_movsbw_AX_AL();
3934 break;
3935 case 0x99: /* CDQ/CWD */
3936#ifdef TARGET_X86_64
3937 if (dflag == 2) {
3938 gen_op_movsqo_RDX_RAX();
3939 } else
3940#endif
3941 if (dflag == 1)
3942 gen_op_movslq_EDX_EAX();
3943 else
3944 gen_op_movswl_DX_AX();
3945 break;
3946 case 0x1af: /* imul Gv, Ev */
3947 case 0x69: /* imul Gv, Ev, I */
3948 case 0x6b:
3949 ot = dflag + OT_WORD;
3950 modrm = ldub_code(s->pc++);
3951 reg = ((modrm >> 3) & 7) | rex_r;
3952 if (b == 0x69)
3953 s->rip_offset = insn_const_size(ot);
3954 else if (b == 0x6b)
3955 s->rip_offset = 1;
3956 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3957 if (b == 0x69) {
3958 val = insn_get(s, ot);
3959 gen_op_movl_T1_im(val);
3960 } else if (b == 0x6b) {
3961 val = (int8_t)insn_get(s, OT_BYTE);
3962 gen_op_movl_T1_im(val);
3963 } else {
3964 gen_op_mov_TN_reg[ot][1][reg]();
3965 }
3966
3967#ifdef TARGET_X86_64
3968 if (ot == OT_QUAD) {
3969 gen_op_imulq_T0_T1();
3970 } else
3971#endif
3972 if (ot == OT_LONG) {
3973 gen_op_imull_T0_T1();
3974 } else {
3975 gen_op_imulw_T0_T1();
3976 }
3977 gen_op_mov_reg_T0[ot][reg]();
3978 s->cc_op = CC_OP_MULB + ot;
3979 break;
3980 case 0x1c0:
3981 case 0x1c1: /* xadd Ev, Gv */
3982 if ((b & 1) == 0)
3983 ot = OT_BYTE;
3984 else
3985 ot = dflag + OT_WORD;
3986 modrm = ldub_code(s->pc++);
3987 reg = ((modrm >> 3) & 7) | rex_r;
3988 mod = (modrm >> 6) & 3;
3989 if (mod == 3) {
3990 rm = (modrm & 7) | REX_B(s);
3991 gen_op_mov_TN_reg[ot][0][reg]();
3992 gen_op_mov_TN_reg[ot][1][rm]();
3993 gen_op_addl_T0_T1();
3994 gen_op_mov_reg_T1[ot][reg]();
3995 gen_op_mov_reg_T0[ot][rm]();
3996 } else {
3997 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3998 gen_op_mov_TN_reg[ot][0][reg]();
3999 gen_op_ld_T1_A0[ot + s->mem_index]();
4000 gen_op_addl_T0_T1();
4001 gen_op_st_T0_A0[ot + s->mem_index]();
4002 gen_op_mov_reg_T1[ot][reg]();
4003 }
4004 gen_op_update2_cc();
4005 s->cc_op = CC_OP_ADDB + ot;
4006 break;
4007 case 0x1b0:
4008 case 0x1b1: /* cmpxchg Ev, Gv */
4009 if ((b & 1) == 0)
4010 ot = OT_BYTE;
4011 else
4012 ot = dflag + OT_WORD;
4013 modrm = ldub_code(s->pc++);
4014 reg = ((modrm >> 3) & 7) | rex_r;
4015 mod = (modrm >> 6) & 3;
4016 gen_op_mov_TN_reg[ot][1][reg]();
4017 if (mod == 3) {
4018 rm = (modrm & 7) | REX_B(s);
4019 gen_op_mov_TN_reg[ot][0][rm]();
4020 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
4021 gen_op_mov_reg_T0[ot][rm]();
4022 } else {
4023 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4024 gen_op_ld_T0_A0[ot + s->mem_index]();
4025 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
4026 }
4027 s->cc_op = CC_OP_SUBB + ot;
4028 break;
4029 case 0x1c7: /* cmpxchg8b */
4030 modrm = ldub_code(s->pc++);
4031 mod = (modrm >> 6) & 3;
4032 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4033 goto illegal_op;
4034 if (s->cc_op != CC_OP_DYNAMIC)
4035 gen_op_set_cc_op(s->cc_op);
4036 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4037 gen_op_cmpxchg8b();
4038 s->cc_op = CC_OP_EFLAGS;
4039 break;
4040
4041 /**************************/
4042 /* push/pop */
4043 case 0x50 ... 0x57: /* push */
4044 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
4045 gen_push_T0(s);
4046 break;
4047 case 0x58 ... 0x5f: /* pop */
4048 if (CODE64(s)) {
4049 ot = dflag ? OT_QUAD : OT_WORD;
4050 } else {
4051 ot = dflag + OT_WORD;
4052 }
4053 gen_pop_T0(s);
4054 /* NOTE: order is important for pop %sp */
4055 gen_pop_update(s);
4056 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
4057 break;
4058 case 0x60: /* pusha */
4059 if (CODE64(s))
4060 goto illegal_op;
4061 gen_pusha(s);
4062 break;
4063 case 0x61: /* popa */
4064 if (CODE64(s))
4065 goto illegal_op;
4066 gen_popa(s);
4067 break;
4068 case 0x68: /* push Iv */
4069 case 0x6a:
4070 if (CODE64(s)) {
4071 ot = dflag ? OT_QUAD : OT_WORD;
4072 } else {
4073 ot = dflag + OT_WORD;
4074 }
4075 if (b == 0x68)
4076 val = insn_get(s, ot);
4077 else
4078 val = (int8_t)insn_get(s, OT_BYTE);
4079 gen_op_movl_T0_im(val);
4080 gen_push_T0(s);
4081 break;
4082 case 0x8f: /* pop Ev */
4083 if (CODE64(s)) {
4084 ot = dflag ? OT_QUAD : OT_WORD;
4085 } else {
4086 ot = dflag + OT_WORD;
4087 }
4088 modrm = ldub_code(s->pc++);
4089 mod = (modrm >> 6) & 3;
4090 gen_pop_T0(s);
4091 if (mod == 3) {
4092 /* NOTE: order is important for pop %sp */
4093 gen_pop_update(s);
4094 rm = (modrm & 7) | REX_B(s);
4095 gen_op_mov_reg_T0[ot][rm]();
4096 } else {
4097 /* NOTE: order is important too for MMU exceptions */
4098 s->popl_esp_hack = 1 << ot;
4099 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4100 s->popl_esp_hack = 0;
4101 gen_pop_update(s);
4102 }
4103 break;
4104 case 0xc8: /* enter */
4105 {
4106 int level;
4107 val = lduw_code(s->pc);
4108 s->pc += 2;
4109 level = ldub_code(s->pc++);
4110 gen_enter(s, val, level);
4111 }
4112 break;
4113 case 0xc9: /* leave */
4114 /* XXX: exception not precise (ESP is updated before potential exception) */
4115 if (CODE64(s)) {
4116 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
4117 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
4118 } else if (s->ss32) {
4119 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
4120 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
4121 } else {
4122 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
4123 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
4124 }
4125 gen_pop_T0(s);
4126 if (CODE64(s)) {
4127 ot = dflag ? OT_QUAD : OT_WORD;
4128 } else {
4129 ot = dflag + OT_WORD;
4130 }
4131 gen_op_mov_reg_T0[ot][R_EBP]();
4132 gen_pop_update(s);
4133 break;
4134 case 0x06: /* push es */
4135 case 0x0e: /* push cs */
4136 case 0x16: /* push ss */
4137 case 0x1e: /* push ds */
4138 if (CODE64(s))
4139 goto illegal_op;
4140 gen_op_movl_T0_seg(b >> 3);
4141 gen_push_T0(s);
4142 break;
4143 case 0x1a0: /* push fs */
4144 case 0x1a8: /* push gs */
4145 gen_op_movl_T0_seg((b >> 3) & 7);
4146 gen_push_T0(s);
4147 break;
4148 case 0x07: /* pop es */
4149 case 0x17: /* pop ss */
4150 case 0x1f: /* pop ds */
4151 if (CODE64(s))
4152 goto illegal_op;
4153 reg = b >> 3;
4154 gen_pop_T0(s);
4155 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4156 gen_pop_update(s);
4157 if (reg == R_SS) {
4158 /* if reg == SS, inhibit interrupts/trace. */
4159 /* If several instructions disable interrupts, only the
4160 _first_ does it */
4161 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4162 gen_op_set_inhibit_irq();
4163 s->tf = 0;
4164 }
4165 if (s->is_jmp) {
4166 gen_jmp_im(s->pc - s->cs_base);
4167 gen_eob(s);
4168 }
4169 break;
4170 case 0x1a1: /* pop fs */
4171 case 0x1a9: /* pop gs */
4172 gen_pop_T0(s);
4173 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4174 gen_pop_update(s);
4175 if (s->is_jmp) {
4176 gen_jmp_im(s->pc - s->cs_base);
4177 gen_eob(s);
4178 }
4179 break;
4180
4181 /**************************/
4182 /* mov */
4183 case 0x88:
4184 case 0x89: /* mov Gv, Ev */
4185 if ((b & 1) == 0)
4186 ot = OT_BYTE;
4187 else
4188 ot = dflag + OT_WORD;
4189 modrm = ldub_code(s->pc++);
4190 reg = ((modrm >> 3) & 7) | rex_r;
4191
4192 /* generate a generic store */
4193 gen_ldst_modrm(s, modrm, ot, reg, 1);
4194 break;
4195 case 0xc6:
4196 case 0xc7: /* mov Ev, Iv */
4197 if ((b & 1) == 0)
4198 ot = OT_BYTE;
4199 else
4200 ot = dflag + OT_WORD;
4201 modrm = ldub_code(s->pc++);
4202 mod = (modrm >> 6) & 3;
4203 if (mod != 3) {
4204 s->rip_offset = insn_const_size(ot);
4205 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4206 }
4207 val = insn_get(s, ot);
4208 gen_op_movl_T0_im(val);
4209 if (mod != 3)
4210 gen_op_st_T0_A0[ot + s->mem_index]();
4211 else
4212 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
4213 break;
4214 case 0x8a:
4215 case 0x8b: /* mov Ev, Gv */
4216#ifdef VBOX /* dtrace hot fix */
4217 if (prefixes & PREFIX_LOCK)
4218 goto illegal_op;
4219#endif
4220 if ((b & 1) == 0)
4221 ot = OT_BYTE;
4222 else
4223 ot = OT_WORD + dflag;
4224 modrm = ldub_code(s->pc++);
4225 reg = ((modrm >> 3) & 7) | rex_r;
4226
4227 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4228 gen_op_mov_reg_T0[ot][reg]();
4229 break;
4230 case 0x8e: /* mov seg, Gv */
4231 modrm = ldub_code(s->pc++);
4232 reg = (modrm >> 3) & 7;
4233 if (reg >= 6 || reg == R_CS)
4234 goto illegal_op;
4235 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4236 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4237 if (reg == R_SS) {
4238 /* if reg == SS, inhibit interrupts/trace */
4239 /* If several instructions disable interrupts, only the
4240 _first_ does it */
4241 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4242 gen_op_set_inhibit_irq();
4243 s->tf = 0;
4244 }
4245 if (s->is_jmp) {
4246 gen_jmp_im(s->pc - s->cs_base);
4247 gen_eob(s);
4248 }
4249 break;
4250 case 0x8c: /* mov Gv, seg */
4251 modrm = ldub_code(s->pc++);
4252 reg = (modrm >> 3) & 7;
4253 mod = (modrm >> 6) & 3;
4254 if (reg >= 6)
4255 goto illegal_op;
4256 gen_op_movl_T0_seg(reg);
4257 if (mod == 3)
4258 ot = OT_WORD + dflag;
4259 else
4260 ot = OT_WORD;
4261 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4262 break;
4263
4264 case 0x1b6: /* movzbS Gv, Eb */
4265 case 0x1b7: /* movzwS Gv, Eb */
4266 case 0x1be: /* movsbS Gv, Eb */
4267 case 0x1bf: /* movswS Gv, Eb */
4268 {
4269 int d_ot;
4270 /* d_ot is the size of destination */
4271 d_ot = dflag + OT_WORD;
4272 /* ot is the size of source */
4273 ot = (b & 1) + OT_BYTE;
4274 modrm = ldub_code(s->pc++);
4275 reg = ((modrm >> 3) & 7) | rex_r;
4276 mod = (modrm >> 6) & 3;
4277 rm = (modrm & 7) | REX_B(s);
4278
4279 if (mod == 3) {
4280 gen_op_mov_TN_reg[ot][0][rm]();
4281 switch(ot | (b & 8)) {
4282 case OT_BYTE:
4283 gen_op_movzbl_T0_T0();
4284 break;
4285 case OT_BYTE | 8:
4286 gen_op_movsbl_T0_T0();
4287 break;
4288 case OT_WORD:
4289 gen_op_movzwl_T0_T0();
4290 break;
4291 default:
4292 case OT_WORD | 8:
4293 gen_op_movswl_T0_T0();
4294 break;
4295 }
4296 gen_op_mov_reg_T0[d_ot][reg]();
4297 } else {
4298 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4299 if (b & 8) {
4300 gen_op_lds_T0_A0[ot + s->mem_index]();
4301 } else {
4302 gen_op_ldu_T0_A0[ot + s->mem_index]();
4303 }
4304 gen_op_mov_reg_T0[d_ot][reg]();
4305 }
4306 }
4307 break;
4308
4309 case 0x8d: /* lea */
4310 ot = dflag + OT_WORD;
4311 modrm = ldub_code(s->pc++);
4312 mod = (modrm >> 6) & 3;
4313 if (mod == 3)
4314 goto illegal_op;
4315 reg = ((modrm >> 3) & 7) | rex_r;
4316 /* we must ensure that no segment is added */
4317 s->override = -1;
4318 val = s->addseg;
4319 s->addseg = 0;
4320 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4321 s->addseg = val;
4322 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4323 break;
4324
4325 case 0xa0: /* mov EAX, Ov */
4326 case 0xa1:
4327 case 0xa2: /* mov Ov, EAX */
4328 case 0xa3:
4329 {
4330 target_ulong offset_addr;
4331
4332 if ((b & 1) == 0)
4333 ot = OT_BYTE;
4334 else
4335 ot = dflag + OT_WORD;
4336#ifdef TARGET_X86_64
4337 if (s->aflag == 2) {
4338 offset_addr = ldq_code(s->pc);
4339 s->pc += 8;
4340 if (offset_addr == (int32_t)offset_addr)
4341 gen_op_movq_A0_im(offset_addr);
4342 else
4343 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4344 } else
4345#endif
4346 {
4347 if (s->aflag) {
4348 offset_addr = insn_get(s, OT_LONG);
4349 } else {
4350 offset_addr = insn_get(s, OT_WORD);
4351 }
4352 gen_op_movl_A0_im(offset_addr);
4353 }
4354 gen_add_A0_ds_seg(s);
4355 if ((b & 2) == 0) {
4356 gen_op_ld_T0_A0[ot + s->mem_index]();
4357 gen_op_mov_reg_T0[ot][R_EAX]();
4358 } else {
4359 gen_op_mov_TN_reg[ot][0][R_EAX]();
4360 gen_op_st_T0_A0[ot + s->mem_index]();
4361 }
4362 }
4363 break;
4364 case 0xd7: /* xlat */
4365#ifdef TARGET_X86_64
4366 if (s->aflag == 2) {
4367 gen_op_movq_A0_reg[R_EBX]();
4368 gen_op_addq_A0_AL();
4369 } else
4370#endif
4371 {
4372 gen_op_movl_A0_reg[R_EBX]();
4373 gen_op_addl_A0_AL();
4374 if (s->aflag == 0)
4375 gen_op_andl_A0_ffff();
4376 }
4377 gen_add_A0_ds_seg(s);
4378 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4379 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4380 break;
4381 case 0xb0 ... 0xb7: /* mov R, Ib */
4382 val = insn_get(s, OT_BYTE);
4383 gen_op_movl_T0_im(val);
4384 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4385 break;
4386 case 0xb8 ... 0xbf: /* mov R, Iv */
4387#ifdef TARGET_X86_64
4388 if (dflag == 2) {
4389 uint64_t tmp;
4390 /* 64 bit case */
4391 tmp = ldq_code(s->pc);
4392 s->pc += 8;
4393 reg = (b & 7) | REX_B(s);
4394 gen_movtl_T0_im(tmp);
4395 gen_op_mov_reg_T0[OT_QUAD][reg]();
4396 } else
4397#endif
4398 {
4399 ot = dflag ? OT_LONG : OT_WORD;
4400 val = insn_get(s, ot);
4401 reg = (b & 7) | REX_B(s);
4402 gen_op_movl_T0_im(val);
4403 gen_op_mov_reg_T0[ot][reg]();
4404 }
4405 break;
4406
4407 case 0x91 ... 0x97: /* xchg R, EAX */
4408 ot = dflag + OT_WORD;
4409 reg = (b & 7) | REX_B(s);
4410 rm = R_EAX;
4411 goto do_xchg_reg;
4412 case 0x86:
4413 case 0x87: /* xchg Ev, Gv */
4414 if ((b & 1) == 0)
4415 ot = OT_BYTE;
4416 else
4417 ot = dflag + OT_WORD;
4418 modrm = ldub_code(s->pc++);
4419 reg = ((modrm >> 3) & 7) | rex_r;
4420 mod = (modrm >> 6) & 3;
4421 if (mod == 3) {
4422 rm = (modrm & 7) | REX_B(s);
4423 do_xchg_reg:
4424 gen_op_mov_TN_reg[ot][0][reg]();
4425 gen_op_mov_TN_reg[ot][1][rm]();
4426 gen_op_mov_reg_T0[ot][rm]();
4427 gen_op_mov_reg_T1[ot][reg]();
4428 } else {
4429 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4430 gen_op_mov_TN_reg[ot][0][reg]();
4431 /* for xchg, lock is implicit */
4432 if (!(prefixes & PREFIX_LOCK))
4433 gen_op_lock();
4434 gen_op_ld_T1_A0[ot + s->mem_index]();
4435 gen_op_st_T0_A0[ot + s->mem_index]();
4436 if (!(prefixes & PREFIX_LOCK))
4437 gen_op_unlock();
4438 gen_op_mov_reg_T1[ot][reg]();
4439 }
4440 break;
4441 case 0xc4: /* les Gv */
4442 if (CODE64(s))
4443 goto illegal_op;
4444 op = R_ES;
4445 goto do_lxx;
4446 case 0xc5: /* lds Gv */
4447 if (CODE64(s))
4448 goto illegal_op;
4449 op = R_DS;
4450 goto do_lxx;
4451 case 0x1b2: /* lss Gv */
4452 op = R_SS;
4453 goto do_lxx;
4454 case 0x1b4: /* lfs Gv */
4455 op = R_FS;
4456 goto do_lxx;
4457 case 0x1b5: /* lgs Gv */
4458 op = R_GS;
4459 do_lxx:
4460 ot = dflag ? OT_LONG : OT_WORD;
4461 modrm = ldub_code(s->pc++);
4462 reg = ((modrm >> 3) & 7) | rex_r;
4463 mod = (modrm >> 6) & 3;
4464 if (mod == 3)
4465 goto illegal_op;
4466 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4467 gen_op_ld_T1_A0[ot + s->mem_index]();
4468 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4469 /* load the segment first to handle exceptions properly */
4470 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4471 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4472 /* then put the data */
4473 gen_op_mov_reg_T1[ot][reg]();
4474 if (s->is_jmp) {
4475 gen_jmp_im(s->pc - s->cs_base);
4476 gen_eob(s);
4477 }
4478 break;
4479
4480 /************************/
4481 /* shifts */
4482 case 0xc0:
4483 case 0xc1:
4484 /* shift Ev,Ib */
4485 shift = 2;
4486 grp2:
4487 {
4488 if ((b & 1) == 0)
4489 ot = OT_BYTE;
4490 else
4491 ot = dflag + OT_WORD;
4492
4493 modrm = ldub_code(s->pc++);
4494 mod = (modrm >> 6) & 3;
4495 op = (modrm >> 3) & 7;
4496
4497 if (mod != 3) {
4498 if (shift == 2) {
4499 s->rip_offset = 1;
4500 }
4501 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4502 opreg = OR_TMP0;
4503 } else {
4504 opreg = (modrm & 7) | REX_B(s);
4505 }
4506
4507 /* simpler op */
4508 if (shift == 0) {
4509 gen_shift(s, op, ot, opreg, OR_ECX);
4510 } else {
4511 if (shift == 2) {
4512 shift = ldub_code(s->pc++);
4513 }
4514 gen_shifti(s, op, ot, opreg, shift);
4515 }
4516 }
4517 break;
4518 case 0xd0:
4519 case 0xd1:
4520 /* shift Ev,1 */
4521 shift = 1;
4522 goto grp2;
4523 case 0xd2:
4524 case 0xd3:
4525 /* shift Ev,cl */
4526 shift = 0;
4527 goto grp2;
4528
4529 case 0x1a4: /* shld imm */
4530 op = 0;
4531 shift = 1;
4532 goto do_shiftd;
4533 case 0x1a5: /* shld cl */
4534 op = 0;
4535 shift = 0;
4536 goto do_shiftd;
4537 case 0x1ac: /* shrd imm */
4538 op = 1;
4539 shift = 1;
4540 goto do_shiftd;
4541 case 0x1ad: /* shrd cl */
4542 op = 1;
4543 shift = 0;
4544 do_shiftd:
4545 ot = dflag + OT_WORD;
4546 modrm = ldub_code(s->pc++);
4547 mod = (modrm >> 6) & 3;
4548 rm = (modrm & 7) | REX_B(s);
4549 reg = ((modrm >> 3) & 7) | rex_r;
4550
4551 if (mod != 3) {
4552 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4553 gen_op_ld_T0_A0[ot + s->mem_index]();
4554 } else {
4555 gen_op_mov_TN_reg[ot][0][rm]();
4556 }
4557 gen_op_mov_TN_reg[ot][1][reg]();
4558
4559 if (shift) {
4560 val = ldub_code(s->pc++);
4561 if (ot == OT_QUAD)
4562 val &= 0x3f;
4563 else
4564 val &= 0x1f;
4565 if (val) {
4566 if (mod == 3)
4567 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4568 else
4569 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4570 if (op == 0 && ot != OT_WORD)
4571 s->cc_op = CC_OP_SHLB + ot;
4572 else
4573 s->cc_op = CC_OP_SARB + ot;
4574 }
4575 } else {
4576 if (s->cc_op != CC_OP_DYNAMIC)
4577 gen_op_set_cc_op(s->cc_op);
4578 if (mod == 3)
4579 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4580 else
4581 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4582 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4583 }
4584 if (mod == 3) {
4585 gen_op_mov_reg_T0[ot][rm]();
4586 }
4587 break;
4588
4589 /************************/
4590 /* floats */
4591 case 0xd8 ... 0xdf:
4592 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4593 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4594 /* XXX: what to do if illegal op ? */
4595 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4596 break;
4597 }
4598 modrm = ldub_code(s->pc++);
4599 mod = (modrm >> 6) & 3;
4600 rm = modrm & 7;
4601 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4602 if (mod != 3) {
4603 /* memory op */
4604 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4605 switch(op) {
4606 case 0x00 ... 0x07: /* fxxxs */
4607 case 0x10 ... 0x17: /* fixxxl */
4608 case 0x20 ... 0x27: /* fxxxl */
4609 case 0x30 ... 0x37: /* fixxx */
4610 {
4611 int op1;
4612 op1 = op & 7;
4613
4614 switch(op >> 4) {
4615 case 0:
4616 gen_op_flds_FT0_A0();
4617 break;
4618 case 1:
4619 gen_op_fildl_FT0_A0();
4620 break;
4621 case 2:
4622 gen_op_fldl_FT0_A0();
4623 break;
4624 case 3:
4625 default:
4626 gen_op_fild_FT0_A0();
4627 break;
4628 }
4629
4630 gen_op_fp_arith_ST0_FT0[op1]();
4631 if (op1 == 3) {
4632 /* fcomp needs pop */
4633 gen_op_fpop();
4634 }
4635 }
4636 break;
4637 case 0x08: /* flds */
4638 case 0x0a: /* fsts */
4639 case 0x0b: /* fstps */
4640 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4641 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4642 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4643 switch(op & 7) {
4644 case 0:
4645 switch(op >> 4) {
4646 case 0:
4647 gen_op_flds_ST0_A0();
4648 break;
4649 case 1:
4650 gen_op_fildl_ST0_A0();
4651 break;
4652 case 2:
4653 gen_op_fldl_ST0_A0();
4654 break;
4655 case 3:
4656 default:
4657 gen_op_fild_ST0_A0();
4658 break;
4659 }
4660 break;
4661 case 1:
4662 switch(op >> 4) {
4663 case 1:
4664 gen_op_fisttl_ST0_A0();
4665 break;
4666 case 2:
4667 gen_op_fisttll_ST0_A0();
4668 break;
4669 case 3:
4670 default:
4671 gen_op_fistt_ST0_A0();
4672 }
4673 gen_op_fpop();
4674 break;
4675 default:
4676 switch(op >> 4) {
4677 case 0:
4678 gen_op_fsts_ST0_A0();
4679 break;
4680 case 1:
4681 gen_op_fistl_ST0_A0();
4682 break;
4683 case 2:
4684 gen_op_fstl_ST0_A0();
4685 break;
4686 case 3:
4687 default:
4688 gen_op_fist_ST0_A0();
4689 break;
4690 }
4691 if ((op & 7) == 3)
4692 gen_op_fpop();
4693 break;
4694 }
4695 break;
4696 case 0x0c: /* fldenv mem */
4697 gen_op_fldenv_A0(s->dflag);
4698 break;
4699 case 0x0d: /* fldcw mem */
4700 gen_op_fldcw_A0();
4701 break;
4702 case 0x0e: /* fnstenv mem */
4703 gen_op_fnstenv_A0(s->dflag);
4704 break;
4705 case 0x0f: /* fnstcw mem */
4706 gen_op_fnstcw_A0();
4707 break;
4708 case 0x1d: /* fldt mem */
4709 gen_op_fldt_ST0_A0();
4710 break;
4711 case 0x1f: /* fstpt mem */
4712 gen_op_fstt_ST0_A0();
4713 gen_op_fpop();
4714 break;
4715 case 0x2c: /* frstor mem */
4716 gen_op_frstor_A0(s->dflag);
4717 break;
4718 case 0x2e: /* fnsave mem */
4719 gen_op_fnsave_A0(s->dflag);
4720 break;
4721 case 0x2f: /* fnstsw mem */
4722 gen_op_fnstsw_A0();
4723 break;
4724 case 0x3c: /* fbld */
4725 gen_op_fbld_ST0_A0();
4726 break;
4727 case 0x3e: /* fbstp */
4728 gen_op_fbst_ST0_A0();
4729 gen_op_fpop();
4730 break;
4731 case 0x3d: /* fildll */
4732 gen_op_fildll_ST0_A0();
4733 break;
4734 case 0x3f: /* fistpll */
4735 gen_op_fistll_ST0_A0();
4736 gen_op_fpop();
4737 break;
4738 default:
4739 goto illegal_op;
4740 }
4741 } else {
4742 /* register float ops */
4743 opreg = rm;
4744
4745 switch(op) {
4746 case 0x08: /* fld sti */
4747 gen_op_fpush();
4748 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4749 break;
4750 case 0x09: /* fxchg sti */
4751 case 0x29: /* fxchg4 sti, undocumented op */
4752 case 0x39: /* fxchg7 sti, undocumented op */
4753 gen_op_fxchg_ST0_STN(opreg);
4754 break;
4755 case 0x0a: /* grp d9/2 */
4756 switch(rm) {
4757 case 0: /* fnop */
4758 /* check exceptions (FreeBSD FPU probe) */
4759 if (s->cc_op != CC_OP_DYNAMIC)
4760 gen_op_set_cc_op(s->cc_op);
4761 gen_jmp_im(pc_start - s->cs_base);
4762 gen_op_fwait();
4763 break;
4764 default:
4765 goto illegal_op;
4766 }
4767 break;
4768 case 0x0c: /* grp d9/4 */
4769 switch(rm) {
4770 case 0: /* fchs */
4771 gen_op_fchs_ST0();
4772 break;
4773 case 1: /* fabs */
4774 gen_op_fabs_ST0();
4775 break;
4776 case 4: /* ftst */
4777 gen_op_fldz_FT0();
4778 gen_op_fcom_ST0_FT0();
4779 break;
4780 case 5: /* fxam */
4781 gen_op_fxam_ST0();
4782 break;
4783 default:
4784 goto illegal_op;
4785 }
4786 break;
4787 case 0x0d: /* grp d9/5 */
4788 {
4789 switch(rm) {
4790 case 0:
4791 gen_op_fpush();
4792 gen_op_fld1_ST0();
4793 break;
4794 case 1:
4795 gen_op_fpush();
4796 gen_op_fldl2t_ST0();
4797 break;
4798 case 2:
4799 gen_op_fpush();
4800 gen_op_fldl2e_ST0();
4801 break;
4802 case 3:
4803 gen_op_fpush();
4804 gen_op_fldpi_ST0();
4805 break;
4806 case 4:
4807 gen_op_fpush();
4808 gen_op_fldlg2_ST0();
4809 break;
4810 case 5:
4811 gen_op_fpush();
4812 gen_op_fldln2_ST0();
4813 break;
4814 case 6:
4815 gen_op_fpush();
4816 gen_op_fldz_ST0();
4817 break;
4818 default:
4819 goto illegal_op;
4820 }
4821 }
4822 break;
4823 case 0x0e: /* grp d9/6 */
4824 switch(rm) {
4825 case 0: /* f2xm1 */
4826 gen_op_f2xm1();
4827 break;
4828 case 1: /* fyl2x */
4829 gen_op_fyl2x();
4830 break;
4831 case 2: /* fptan */
4832 gen_op_fptan();
4833 break;
4834 case 3: /* fpatan */
4835 gen_op_fpatan();
4836 break;
4837 case 4: /* fxtract */
4838 gen_op_fxtract();
4839 break;
4840 case 5: /* fprem1 */
4841 gen_op_fprem1();
4842 break;
4843 case 6: /* fdecstp */
4844 gen_op_fdecstp();
4845 break;
4846 default:
4847 case 7: /* fincstp */
4848 gen_op_fincstp();
4849 break;
4850 }
4851 break;
4852 case 0x0f: /* grp d9/7 */
4853 switch(rm) {
4854 case 0: /* fprem */
4855 gen_op_fprem();
4856 break;
4857 case 1: /* fyl2xp1 */
4858 gen_op_fyl2xp1();
4859 break;
4860 case 2: /* fsqrt */
4861 gen_op_fsqrt();
4862 break;
4863 case 3: /* fsincos */
4864 gen_op_fsincos();
4865 break;
4866 case 5: /* fscale */
4867 gen_op_fscale();
4868 break;
4869 case 4: /* frndint */
4870 gen_op_frndint();
4871 break;
4872 case 6: /* fsin */
4873 gen_op_fsin();
4874 break;
4875 default:
4876 case 7: /* fcos */
4877 gen_op_fcos();
4878 break;
4879 }
4880 break;
4881 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4882 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4883 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4884 {
4885 int op1;
4886
4887 op1 = op & 7;
4888 if (op >= 0x20) {
4889 gen_op_fp_arith_STN_ST0[op1](opreg);
4890 if (op >= 0x30)
4891 gen_op_fpop();
4892 } else {
4893 gen_op_fmov_FT0_STN(opreg);
4894 gen_op_fp_arith_ST0_FT0[op1]();
4895 }
4896 }
4897 break;
4898 case 0x02: /* fcom */
4899 case 0x22: /* fcom2, undocumented op */
4900 gen_op_fmov_FT0_STN(opreg);
4901 gen_op_fcom_ST0_FT0();
4902 break;
4903 case 0x03: /* fcomp */
4904 case 0x23: /* fcomp3, undocumented op */
4905 case 0x32: /* fcomp5, undocumented op */
4906 gen_op_fmov_FT0_STN(opreg);
4907 gen_op_fcom_ST0_FT0();
4908 gen_op_fpop();
4909 break;
4910 case 0x15: /* da/5 */
4911 switch(rm) {
4912 case 1: /* fucompp */
4913 gen_op_fmov_FT0_STN(1);
4914 gen_op_fucom_ST0_FT0();
4915 gen_op_fpop();
4916 gen_op_fpop();
4917 break;
4918 default:
4919 goto illegal_op;
4920 }
4921 break;
4922 case 0x1c:
4923 switch(rm) {
4924 case 0: /* feni (287 only, just do nop here) */
4925 break;
4926 case 1: /* fdisi (287 only, just do nop here) */
4927 break;
4928 case 2: /* fclex */
4929 gen_op_fclex();
4930 break;
4931 case 3: /* fninit */
4932 gen_op_fninit();
4933 break;
4934 case 4: /* fsetpm (287 only, just do nop here) */
4935 break;
4936 default:
4937 goto illegal_op;
4938 }
4939 break;
4940 case 0x1d: /* fucomi */
4941 if (s->cc_op != CC_OP_DYNAMIC)
4942 gen_op_set_cc_op(s->cc_op);
4943 gen_op_fmov_FT0_STN(opreg);
4944 gen_op_fucomi_ST0_FT0();
4945 s->cc_op = CC_OP_EFLAGS;
4946 break;
4947 case 0x1e: /* fcomi */
4948 if (s->cc_op != CC_OP_DYNAMIC)
4949 gen_op_set_cc_op(s->cc_op);
4950 gen_op_fmov_FT0_STN(opreg);
4951 gen_op_fcomi_ST0_FT0();
4952 s->cc_op = CC_OP_EFLAGS;
4953 break;
4954 case 0x28: /* ffree sti */
4955 gen_op_ffree_STN(opreg);
4956 break;
4957 case 0x2a: /* fst sti */
4958 gen_op_fmov_STN_ST0(opreg);
4959 break;
4960 case 0x2b: /* fstp sti */
4961 case 0x0b: /* fstp1 sti, undocumented op */
4962 case 0x3a: /* fstp8 sti, undocumented op */
4963 case 0x3b: /* fstp9 sti, undocumented op */
4964 gen_op_fmov_STN_ST0(opreg);
4965 gen_op_fpop();
4966 break;
4967 case 0x2c: /* fucom st(i) */
4968 gen_op_fmov_FT0_STN(opreg);
4969 gen_op_fucom_ST0_FT0();
4970 break;
4971 case 0x2d: /* fucomp st(i) */
4972 gen_op_fmov_FT0_STN(opreg);
4973 gen_op_fucom_ST0_FT0();
4974 gen_op_fpop();
4975 break;
4976 case 0x33: /* de/3 */
4977 switch(rm) {
4978 case 1: /* fcompp */
4979 gen_op_fmov_FT0_STN(1);
4980 gen_op_fcom_ST0_FT0();
4981 gen_op_fpop();
4982 gen_op_fpop();
4983 break;
4984 default:
4985 goto illegal_op;
4986 }
4987 break;
4988 case 0x38: /* ffreep sti, undocumented op */
4989 gen_op_ffree_STN(opreg);
4990 gen_op_fpop();
4991 break;
4992 case 0x3c: /* df/4 */
4993 switch(rm) {
4994 case 0:
4995 gen_op_fnstsw_EAX();
4996 break;
4997 default:
4998 goto illegal_op;
4999 }
5000 break;
5001 case 0x3d: /* fucomip */
5002 if (s->cc_op != CC_OP_DYNAMIC)
5003 gen_op_set_cc_op(s->cc_op);
5004 gen_op_fmov_FT0_STN(opreg);
5005 gen_op_fucomi_ST0_FT0();
5006 gen_op_fpop();
5007 s->cc_op = CC_OP_EFLAGS;
5008 break;
5009 case 0x3e: /* fcomip */
5010 if (s->cc_op != CC_OP_DYNAMIC)
5011 gen_op_set_cc_op(s->cc_op);
5012 gen_op_fmov_FT0_STN(opreg);
5013 gen_op_fcomi_ST0_FT0();
5014 gen_op_fpop();
5015 s->cc_op = CC_OP_EFLAGS;
5016 break;
5017 case 0x10 ... 0x13: /* fcmovxx */
5018 case 0x18 ... 0x1b:
5019 {
5020 int op1;
5021 const static uint8_t fcmov_cc[8] = {
5022 (JCC_B << 1),
5023 (JCC_Z << 1),
5024 (JCC_BE << 1),
5025 (JCC_P << 1),
5026 };
5027 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5028 gen_setcc(s, op1);
5029 gen_op_fcmov_ST0_STN_T0(opreg);
5030 }
5031 break;
5032 default:
5033 goto illegal_op;
5034 }
5035 }
5036#ifdef USE_CODE_COPY
5037 s->tb->cflags |= CF_TB_FP_USED;
5038#endif
5039 break;
5040 /************************/
5041 /* string ops */
5042
5043 case 0xa4: /* movsS */
5044 case 0xa5:
5045 if ((b & 1) == 0)
5046 ot = OT_BYTE;
5047 else
5048 ot = dflag + OT_WORD;
5049
5050 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5051 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5052 } else {
5053 gen_movs(s, ot);
5054 }
5055 break;
5056
5057 case 0xaa: /* stosS */
5058 case 0xab:
5059 if ((b & 1) == 0)
5060 ot = OT_BYTE;
5061 else
5062 ot = dflag + OT_WORD;
5063
5064 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5065 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5066 } else {
5067 gen_stos(s, ot);
5068 }
5069 break;
5070 case 0xac: /* lodsS */
5071 case 0xad:
5072 if ((b & 1) == 0)
5073 ot = OT_BYTE;
5074 else
5075 ot = dflag + OT_WORD;
5076 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5077 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5078 } else {
5079 gen_lods(s, ot);
5080 }
5081 break;
5082 case 0xae: /* scasS */
5083 case 0xaf:
5084 if ((b & 1) == 0)
5085 ot = OT_BYTE;
5086 else
5087 ot = dflag + OT_WORD;
5088 if (prefixes & PREFIX_REPNZ) {
5089 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5090 } else if (prefixes & PREFIX_REPZ) {
5091 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5092 } else {
5093 gen_scas(s, ot);
5094 s->cc_op = CC_OP_SUBB + ot;
5095 }
5096 break;
5097
5098 case 0xa6: /* cmpsS */
5099 case 0xa7:
5100 if ((b & 1) == 0)
5101 ot = OT_BYTE;
5102 else
5103 ot = dflag + OT_WORD;
5104 if (prefixes & PREFIX_REPNZ) {
5105 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5106 } else if (prefixes & PREFIX_REPZ) {
5107 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5108 } else {
5109 gen_cmps(s, ot);
5110 s->cc_op = CC_OP_SUBB + ot;
5111 }
5112 break;
5113 case 0x6c: /* insS */
5114 case 0x6d:
5115 if ((b & 1) == 0)
5116 ot = OT_BYTE;
5117 else
5118 ot = dflag ? OT_LONG : OT_WORD;
5119 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5120 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5121 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5122 } else {
5123 gen_ins(s, ot);
5124 }
5125 break;
5126 case 0x6e: /* outsS */
5127 case 0x6f:
5128 if ((b & 1) == 0)
5129 ot = OT_BYTE;
5130 else
5131 ot = dflag ? OT_LONG : OT_WORD;
5132 gen_check_io(s, ot, 1, pc_start - s->cs_base);
5133 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5134 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5135 } else {
5136 gen_outs(s, ot);
5137 }
5138 break;
5139
5140 /************************/
5141 /* port I/O */
5142 case 0xe4:
5143 case 0xe5:
5144 if ((b & 1) == 0)
5145 ot = OT_BYTE;
5146 else
5147 ot = dflag ? OT_LONG : OT_WORD;
5148 val = ldub_code(s->pc++);
5149 gen_op_movl_T0_im(val);
5150 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5151 gen_op_in[ot]();
5152 gen_op_mov_reg_T1[ot][R_EAX]();
5153 break;
5154 case 0xe6:
5155 case 0xe7:
5156 if ((b & 1) == 0)
5157 ot = OT_BYTE;
5158 else
5159 ot = dflag ? OT_LONG : OT_WORD;
5160 val = ldub_code(s->pc++);
5161 gen_op_movl_T0_im(val);
5162 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5163#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
5164 if (val == 0x80)
5165 break;
5166#endif /* VBOX */
5167 gen_op_mov_TN_reg[ot][1][R_EAX]();
5168 gen_op_out[ot]();
5169 break;
5170 case 0xec:
5171 case 0xed:
5172 if ((b & 1) == 0)
5173 ot = OT_BYTE;
5174 else
5175 ot = dflag ? OT_LONG : OT_WORD;
5176 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5177 gen_op_andl_T0_ffff();
5178 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5179 gen_op_in[ot]();
5180 gen_op_mov_reg_T1[ot][R_EAX]();
5181 break;
5182 case 0xee:
5183 case 0xef:
5184 if ((b & 1) == 0)
5185 ot = OT_BYTE;
5186 else
5187 ot = dflag ? OT_LONG : OT_WORD;
5188 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5189 gen_op_andl_T0_ffff();
5190 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5191 gen_op_mov_TN_reg[ot][1][R_EAX]();
5192 gen_op_out[ot]();
5193 break;
5194
5195 /************************/
5196 /* control */
5197 case 0xc2: /* ret im */
5198 val = ldsw_code(s->pc);
5199 s->pc += 2;
5200 gen_pop_T0(s);
5201 if (CODE64(s) && s->dflag)
5202 s->dflag = 2;
5203 gen_stack_update(s, val + (2 << s->dflag));
5204 if (s->dflag == 0)
5205 gen_op_andl_T0_ffff();
5206 gen_op_jmp_T0();
5207 gen_eob(s);
5208 break;
5209 case 0xc3: /* ret */
5210 gen_pop_T0(s);
5211 gen_pop_update(s);
5212 if (s->dflag == 0)
5213 gen_op_andl_T0_ffff();
5214 gen_op_jmp_T0();
5215 gen_eob(s);
5216 break;
5217 case 0xca: /* lret im */
5218 val = ldsw_code(s->pc);
5219 s->pc += 2;
5220 do_lret:
5221 if (s->pe && !s->vm86) {
5222 if (s->cc_op != CC_OP_DYNAMIC)
5223 gen_op_set_cc_op(s->cc_op);
5224 gen_jmp_im(pc_start - s->cs_base);
5225 gen_op_lret_protected(s->dflag, val);
5226 } else {
5227 gen_stack_A0(s);
5228 /* pop offset */
5229 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5230 if (s->dflag == 0)
5231 gen_op_andl_T0_ffff();
5232 /* NOTE: keeping EIP updated is not a problem in case of
5233 exception */
5234 gen_op_jmp_T0();
5235 /* pop selector */
5236 gen_op_addl_A0_im(2 << s->dflag);
5237 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5238 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5239 /* add stack offset */
5240 gen_stack_update(s, val + (4 << s->dflag));
5241 }
5242 gen_eob(s);
5243 break;
5244 case 0xcb: /* lret */
5245 val = 0;
5246 goto do_lret;
5247 case 0xcf: /* iret */
5248 if (!s->pe) {
5249 /* real mode */
5250 gen_op_iret_real(s->dflag);
5251 s->cc_op = CC_OP_EFLAGS;
5252 } else if (s->vm86) {
5253#ifdef VBOX
5254 if (s->iopl != 3 && (!s->vme || s->dflag)) {
5255#else
5256 if (s->iopl != 3) {
5257#endif
5258 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5259 } else {
5260 gen_op_iret_real(s->dflag);
5261 s->cc_op = CC_OP_EFLAGS;
5262 }
5263 } else {
5264 if (s->cc_op != CC_OP_DYNAMIC)
5265 gen_op_set_cc_op(s->cc_op);
5266 gen_jmp_im(pc_start - s->cs_base);
5267 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5268 s->cc_op = CC_OP_EFLAGS;
5269 }
5270 gen_eob(s);
5271 break;
5272 case 0xe8: /* call im */
5273 {
5274 if (dflag)
5275 tval = (int32_t)insn_get(s, OT_LONG);
5276 else
5277 tval = (int16_t)insn_get(s, OT_WORD);
5278 next_eip = s->pc - s->cs_base;
5279 tval += next_eip;
5280 if (s->dflag == 0)
5281 tval &= 0xffff;
5282 gen_movtl_T0_im(next_eip);
5283 gen_push_T0(s);
5284 gen_jmp(s, tval);
5285 }
5286 break;
5287 case 0x9a: /* lcall im */
5288 {
5289 unsigned int selector, offset;
5290
5291 if (CODE64(s))
5292 goto illegal_op;
5293 ot = dflag ? OT_LONG : OT_WORD;
5294 offset = insn_get(s, ot);
5295 selector = insn_get(s, OT_WORD);
5296
5297 gen_op_movl_T0_im(selector);
5298 gen_op_movl_T1_imu(offset);
5299 }
5300 goto do_lcall;
5301 case 0xe9: /* jmp im */
5302 if (dflag)
5303 tval = (int32_t)insn_get(s, OT_LONG);
5304 else
5305 tval = (int16_t)insn_get(s, OT_WORD);
5306 tval += s->pc - s->cs_base;
5307 if (s->dflag == 0)
5308 tval &= 0xffff;
5309 gen_jmp(s, tval);
5310 break;
5311 case 0xea: /* ljmp im */
5312 {
5313 unsigned int selector, offset;
5314
5315 if (CODE64(s))
5316 goto illegal_op;
5317 ot = dflag ? OT_LONG : OT_WORD;
5318 offset = insn_get(s, ot);
5319 selector = insn_get(s, OT_WORD);
5320
5321 gen_op_movl_T0_im(selector);
5322 gen_op_movl_T1_imu(offset);
5323 }
5324 goto do_ljmp;
5325 case 0xeb: /* jmp Jb */
5326 tval = (int8_t)insn_get(s, OT_BYTE);
5327 tval += s->pc - s->cs_base;
5328 if (s->dflag == 0)
5329 tval &= 0xffff;
5330 gen_jmp(s, tval);
5331 break;
5332 case 0x70 ... 0x7f: /* jcc Jb */
5333 tval = (int8_t)insn_get(s, OT_BYTE);
5334 goto do_jcc;
5335 case 0x180 ... 0x18f: /* jcc Jv */
5336 if (dflag) {
5337 tval = (int32_t)insn_get(s, OT_LONG);
5338 } else {
5339 tval = (int16_t)insn_get(s, OT_WORD);
5340 }
5341 do_jcc:
5342 next_eip = s->pc - s->cs_base;
5343 tval += next_eip;
5344 if (s->dflag == 0)
5345 tval &= 0xffff;
5346 gen_jcc(s, b, tval, next_eip);
5347 break;
5348
5349 case 0x190 ... 0x19f: /* setcc Gv */
5350 modrm = ldub_code(s->pc++);
5351 gen_setcc(s, b);
5352 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5353 break;
5354 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5355 ot = dflag + OT_WORD;
5356 modrm = ldub_code(s->pc++);
5357 reg = ((modrm >> 3) & 7) | rex_r;
5358 mod = (modrm >> 6) & 3;
5359 gen_setcc(s, b);
5360 if (mod != 3) {
5361 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5362 gen_op_ld_T1_A0[ot + s->mem_index]();
5363 } else {
5364 rm = (modrm & 7) | REX_B(s);
5365 gen_op_mov_TN_reg[ot][1][rm]();
5366 }
5367 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5368 break;
5369
5370 /************************/
5371 /* flags */
5372 case 0x9c: /* pushf */
5373#ifdef VBOX
5374 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5375#else
5376 if (s->vm86 && s->iopl != 3) {
5377#endif
5378 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5379 } else {
5380 if (s->cc_op != CC_OP_DYNAMIC)
5381 gen_op_set_cc_op(s->cc_op);
5382#ifdef VBOX
5383 if (s->vm86 && s->vme && s->iopl != 3)
5384 gen_op_movl_T0_eflags_vme();
5385 else
5386#endif
5387 gen_op_movl_T0_eflags();
5388 gen_push_T0(s);
5389 }
5390 break;
5391 case 0x9d: /* popf */
5392#ifdef VBOX
5393 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5394#else
5395 if (s->vm86 && s->iopl != 3) {
5396#endif
5397 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5398 } else {
5399 gen_pop_T0(s);
5400 if (s->cpl == 0) {
5401 if (s->dflag) {
5402 gen_op_movl_eflags_T0_cpl0();
5403 } else {
5404 gen_op_movw_eflags_T0_cpl0();
5405 }
5406 } else {
5407 if (s->cpl <= s->iopl) {
5408 if (s->dflag) {
5409 gen_op_movl_eflags_T0_io();
5410 } else {
5411 gen_op_movw_eflags_T0_io();
5412 }
5413 } else {
5414 if (s->dflag) {
5415 gen_op_movl_eflags_T0();
5416 } else {
5417#ifdef VBOX
5418 if (s->vm86 && s->vme)
5419 gen_op_movw_eflags_T0_vme();
5420 else
5421#endif
5422 gen_op_movw_eflags_T0();
5423 }
5424 }
5425 }
5426 gen_pop_update(s);
5427 s->cc_op = CC_OP_EFLAGS;
5428 /* abort translation because TF flag may change */
5429 gen_jmp_im(s->pc - s->cs_base);
5430 gen_eob(s);
5431 }
5432 break;
5433 case 0x9e: /* sahf */
5434 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5435 goto illegal_op;
5436 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5437 if (s->cc_op != CC_OP_DYNAMIC)
5438 gen_op_set_cc_op(s->cc_op);
5439 gen_op_movb_eflags_T0();
5440 s->cc_op = CC_OP_EFLAGS;
5441 break;
5442 case 0x9f: /* lahf */
5443 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5444 goto illegal_op;
5445 if (s->cc_op != CC_OP_DYNAMIC)
5446 gen_op_set_cc_op(s->cc_op);
5447 gen_op_movl_T0_eflags();
5448 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5449 break;
5450 case 0xf5: /* cmc */
5451 if (s->cc_op != CC_OP_DYNAMIC)
5452 gen_op_set_cc_op(s->cc_op);
5453 gen_op_cmc();
5454 s->cc_op = CC_OP_EFLAGS;
5455 break;
5456 case 0xf8: /* clc */
5457 if (s->cc_op != CC_OP_DYNAMIC)
5458 gen_op_set_cc_op(s->cc_op);
5459 gen_op_clc();
5460 s->cc_op = CC_OP_EFLAGS;
5461 break;
5462 case 0xf9: /* stc */
5463 if (s->cc_op != CC_OP_DYNAMIC)
5464 gen_op_set_cc_op(s->cc_op);
5465 gen_op_stc();
5466 s->cc_op = CC_OP_EFLAGS;
5467 break;
5468 case 0xfc: /* cld */
5469 gen_op_cld();
5470 break;
5471 case 0xfd: /* std */
5472 gen_op_std();
5473 break;
5474
5475 /************************/
5476 /* bit operations */
5477 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5478 ot = dflag + OT_WORD;
5479 modrm = ldub_code(s->pc++);
5480 op = (modrm >> 3) & 7;
5481 mod = (modrm >> 6) & 3;
5482 rm = (modrm & 7) | REX_B(s);
5483 if (mod != 3) {
5484 s->rip_offset = 1;
5485 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5486 gen_op_ld_T0_A0[ot + s->mem_index]();
5487 } else {
5488 gen_op_mov_TN_reg[ot][0][rm]();
5489 }
5490 /* load shift */
5491 val = ldub_code(s->pc++);
5492 gen_op_movl_T1_im(val);
5493 if (op < 4)
5494 goto illegal_op;
5495 op -= 4;
5496 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5497 s->cc_op = CC_OP_SARB + ot;
5498 if (op != 0) {
5499 if (mod != 3)
5500 gen_op_st_T0_A0[ot + s->mem_index]();
5501 else
5502 gen_op_mov_reg_T0[ot][rm]();
5503 gen_op_update_bt_cc();
5504 }
5505 break;
5506 case 0x1a3: /* bt Gv, Ev */
5507 op = 0;
5508 goto do_btx;
5509 case 0x1ab: /* bts */
5510 op = 1;
5511 goto do_btx;
5512 case 0x1b3: /* btr */
5513 op = 2;
5514 goto do_btx;
5515 case 0x1bb: /* btc */
5516 op = 3;
5517 do_btx:
5518 ot = dflag + OT_WORD;
5519 modrm = ldub_code(s->pc++);
5520 reg = ((modrm >> 3) & 7) | rex_r;
5521 mod = (modrm >> 6) & 3;
5522 rm = (modrm & 7) | REX_B(s);
5523 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5524 if (mod != 3) {
5525 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5526 /* specific case: we need to add a displacement */
5527 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5528 gen_op_ld_T0_A0[ot + s->mem_index]();
5529 } else {
5530 gen_op_mov_TN_reg[ot][0][rm]();
5531 }
5532 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5533 s->cc_op = CC_OP_SARB + ot;
5534 if (op != 0) {
5535 if (mod != 3)
5536 gen_op_st_T0_A0[ot + s->mem_index]();
5537 else
5538 gen_op_mov_reg_T0[ot][rm]();
5539 gen_op_update_bt_cc();
5540 }
5541 break;
5542 case 0x1bc: /* bsf */
5543 case 0x1bd: /* bsr */
5544 ot = dflag + OT_WORD;
5545 modrm = ldub_code(s->pc++);
5546 reg = ((modrm >> 3) & 7) | rex_r;
5547 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5548 /* NOTE: in order to handle the 0 case, we must load the
5549 result. It could be optimized with a generated jump */
5550 gen_op_mov_TN_reg[ot][1][reg]();
5551 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5552 gen_op_mov_reg_T1[ot][reg]();
5553 s->cc_op = CC_OP_LOGICB + ot;
5554 break;
5555 /************************/
5556 /* bcd */
5557 case 0x27: /* daa */
5558 if (CODE64(s))
5559 goto illegal_op;
5560 if (s->cc_op != CC_OP_DYNAMIC)
5561 gen_op_set_cc_op(s->cc_op);
5562 gen_op_daa();
5563 s->cc_op = CC_OP_EFLAGS;
5564 break;
5565 case 0x2f: /* das */
5566 if (CODE64(s))
5567 goto illegal_op;
5568 if (s->cc_op != CC_OP_DYNAMIC)
5569 gen_op_set_cc_op(s->cc_op);
5570 gen_op_das();
5571 s->cc_op = CC_OP_EFLAGS;
5572 break;
5573 case 0x37: /* aaa */
5574 if (CODE64(s))
5575 goto illegal_op;
5576 if (s->cc_op != CC_OP_DYNAMIC)
5577 gen_op_set_cc_op(s->cc_op);
5578 gen_op_aaa();
5579 s->cc_op = CC_OP_EFLAGS;
5580 break;
5581 case 0x3f: /* aas */
5582 if (CODE64(s))
5583 goto illegal_op;
5584 if (s->cc_op != CC_OP_DYNAMIC)
5585 gen_op_set_cc_op(s->cc_op);
5586 gen_op_aas();
5587 s->cc_op = CC_OP_EFLAGS;
5588 break;
5589 case 0xd4: /* aam */
5590 if (CODE64(s))
5591 goto illegal_op;
5592 val = ldub_code(s->pc++);
5593 if (val == 0) {
5594 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5595 } else {
5596 gen_op_aam(val);
5597 s->cc_op = CC_OP_LOGICB;
5598 }
5599 break;
5600 case 0xd5: /* aad */
5601 if (CODE64(s))
5602 goto illegal_op;
5603 val = ldub_code(s->pc++);
5604 gen_op_aad(val);
5605 s->cc_op = CC_OP_LOGICB;
5606 break;
5607 /************************/
5608 /* misc */
5609 case 0x90: /* nop */
5610 /* XXX: xchg + rex handling */
5611 /* XXX: correct lock test for all insn */
5612 if (prefixes & PREFIX_LOCK)
5613 goto illegal_op;
5614 break;
5615 case 0x9b: /* fwait */
5616 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5617 (HF_MP_MASK | HF_TS_MASK)) {
5618 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5619 } else {
5620 if (s->cc_op != CC_OP_DYNAMIC)
5621 gen_op_set_cc_op(s->cc_op);
5622 gen_jmp_im(pc_start - s->cs_base);
5623 gen_op_fwait();
5624 }
5625 break;
5626 case 0xcc: /* int3 */
5627#ifdef VBOX
5628 if (s->vm86 && s->iopl != 3 && !s->vme) {
5629 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5630 } else
5631#endif
5632 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5633 break;
5634 case 0xcd: /* int N */
5635 val = ldub_code(s->pc++);
5636#ifdef VBOX
5637 if (s->vm86 && s->iopl != 3 && !s->vme) {
5638#else
5639 if (s->vm86 && s->iopl != 3) {
5640#endif
5641 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5642 } else {
5643 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5644 }
5645 break;
5646 case 0xce: /* into */
5647 if (CODE64(s))
5648 goto illegal_op;
5649 if (s->cc_op != CC_OP_DYNAMIC)
5650 gen_op_set_cc_op(s->cc_op);
5651 gen_jmp_im(pc_start - s->cs_base);
5652 gen_op_into(s->pc - pc_start);
5653 break;
5654 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5655#if 1
5656 gen_debug(s, pc_start - s->cs_base);
5657#else
5658 /* start debug */
5659 tb_flush(cpu_single_env);
5660 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5661#endif
5662 break;
5663 case 0xfa: /* cli */
5664 if (!s->vm86) {
5665 if (s->cpl <= s->iopl) {
5666 gen_op_cli();
5667 } else {
5668 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5669 }
5670 } else {
5671 if (s->iopl == 3) {
5672 gen_op_cli();
5673#ifdef VBOX
5674 } else if (s->iopl != 3 && s->vme) {
5675 gen_op_cli_vme();
5676#endif
5677 } else {
5678 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5679 }
5680 }
5681 break;
5682 case 0xfb: /* sti */
5683 if (!s->vm86) {
5684 if (s->cpl <= s->iopl) {
5685 gen_sti:
5686 gen_op_sti();
5687 /* interruptions are enabled only the first insn after sti */
5688 /* If several instructions disable interrupts, only the
5689 _first_ does it */
5690 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5691 gen_op_set_inhibit_irq();
5692 /* give a chance to handle pending irqs */
5693 gen_jmp_im(s->pc - s->cs_base);
5694 gen_eob(s);
5695 } else {
5696 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5697 }
5698 } else {
5699 if (s->iopl == 3) {
5700 goto gen_sti;
5701#ifdef VBOX
5702 } else if (s->iopl != 3 && s->vme) {
5703 gen_op_sti_vme();
5704 /* give a chance to handle pending irqs */
5705 gen_jmp_im(s->pc - s->cs_base);
5706 gen_eob(s);
5707#endif
5708 } else {
5709 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5710 }
5711 }
5712 break;
5713 case 0x62: /* bound */
5714 if (CODE64(s))
5715 goto illegal_op;
5716 ot = dflag ? OT_LONG : OT_WORD;
5717 modrm = ldub_code(s->pc++);
5718 reg = (modrm >> 3) & 7;
5719 mod = (modrm >> 6) & 3;
5720 if (mod == 3)
5721 goto illegal_op;
5722 gen_op_mov_TN_reg[ot][0][reg]();
5723 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5724 gen_jmp_im(pc_start - s->cs_base);
5725 if (ot == OT_WORD)
5726 gen_op_boundw();
5727 else
5728 gen_op_boundl();
5729 break;
5730 case 0x1c8 ... 0x1cf: /* bswap reg */
5731 reg = (b & 7) | REX_B(s);
5732#ifdef TARGET_X86_64
5733 if (dflag == 2) {
5734 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5735 gen_op_bswapq_T0();
5736 gen_op_mov_reg_T0[OT_QUAD][reg]();
5737 } else
5738#endif
5739 {
5740 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5741 gen_op_bswapl_T0();
5742 gen_op_mov_reg_T0[OT_LONG][reg]();
5743 }
5744 break;
5745 case 0xd6: /* salc */
5746 if (CODE64(s))
5747 goto illegal_op;
5748 if (s->cc_op != CC_OP_DYNAMIC)
5749 gen_op_set_cc_op(s->cc_op);
5750 gen_op_salc();
5751 break;
5752 case 0xe0: /* loopnz */
5753 case 0xe1: /* loopz */
5754 if (s->cc_op != CC_OP_DYNAMIC)
5755 gen_op_set_cc_op(s->cc_op);
5756 /* FALL THRU */
5757 case 0xe2: /* loop */
5758 case 0xe3: /* jecxz */
5759 {
5760 int l1, l2;
5761
5762 tval = (int8_t)insn_get(s, OT_BYTE);
5763 next_eip = s->pc - s->cs_base;
5764 tval += next_eip;
5765 if (s->dflag == 0)
5766 tval &= 0xffff;
5767
5768 l1 = gen_new_label();
5769 l2 = gen_new_label();
5770 b &= 3;
5771 if (b == 3) {
5772 gen_op_jz_ecx[s->aflag](l1);
5773 } else {
5774 gen_op_dec_ECX[s->aflag]();
5775 if (b <= 1)
5776 gen_op_mov_T0_cc();
5777 gen_op_loop[s->aflag][b](l1);
5778 }
5779
5780 gen_jmp_im(next_eip);
5781 gen_op_jmp_label(l2);
5782 gen_set_label(l1);
5783 gen_jmp_im(tval);
5784 gen_set_label(l2);
5785 gen_eob(s);
5786 }
5787 break;
5788 case 0x130: /* wrmsr */
5789 case 0x132: /* rdmsr */
5790 if (s->cpl != 0) {
5791 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5792 } else {
5793 if (b & 2)
5794 gen_op_rdmsr();
5795 else
5796 gen_op_wrmsr();
5797 }
5798 break;
5799 case 0x131: /* rdtsc */
5800 gen_jmp_im(pc_start - s->cs_base);
5801 gen_op_rdtsc();
5802 break;
5803 case 0x134: /* sysenter */
5804 if (CODE64(s))
5805 goto illegal_op;
5806 if (!s->pe) {
5807 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5808 } else {
5809 if (s->cc_op != CC_OP_DYNAMIC) {
5810 gen_op_set_cc_op(s->cc_op);
5811 s->cc_op = CC_OP_DYNAMIC;
5812 }
5813 gen_jmp_im(pc_start - s->cs_base);
5814 gen_op_sysenter();
5815 gen_eob(s);
5816 }
5817 break;
5818 case 0x135: /* sysexit */
5819 if (CODE64(s))
5820 goto illegal_op;
5821 if (!s->pe) {
5822 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5823 } else {
5824 if (s->cc_op != CC_OP_DYNAMIC) {
5825 gen_op_set_cc_op(s->cc_op);
5826 s->cc_op = CC_OP_DYNAMIC;
5827 }
5828 gen_jmp_im(pc_start - s->cs_base);
5829 gen_op_sysexit();
5830 gen_eob(s);
5831 }
5832 break;
5833#ifdef TARGET_X86_64
5834 case 0x105: /* syscall */
5835 /* XXX: is it usable in real mode ? */
5836 if (s->cc_op != CC_OP_DYNAMIC) {
5837 gen_op_set_cc_op(s->cc_op);
5838 s->cc_op = CC_OP_DYNAMIC;
5839 }
5840 gen_jmp_im(pc_start - s->cs_base);
5841 gen_op_syscall(s->pc - pc_start);
5842 gen_eob(s);
5843 break;
5844 case 0x107: /* sysret */
5845 if (!s->pe) {
5846 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5847 } else {
5848 if (s->cc_op != CC_OP_DYNAMIC) {
5849 gen_op_set_cc_op(s->cc_op);
5850 s->cc_op = CC_OP_DYNAMIC;
5851 }
5852 gen_jmp_im(pc_start - s->cs_base);
5853 gen_op_sysret(s->dflag);
5854 /* condition codes are modified only in long mode */
5855 if (s->lma)
5856 s->cc_op = CC_OP_EFLAGS;
5857 gen_eob(s);
5858 }
5859 break;
5860#endif
5861 case 0x1a2: /* cpuid */
5862 gen_op_cpuid();
5863 break;
5864 case 0xf4: /* hlt */
5865 if (s->cpl != 0) {
5866 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5867 } else {
5868 if (s->cc_op != CC_OP_DYNAMIC)
5869 gen_op_set_cc_op(s->cc_op);
5870 gen_jmp_im(s->pc - s->cs_base);
5871 gen_op_hlt();
5872 s->is_jmp = 3;
5873 }
5874 break;
5875 case 0x100:
5876 modrm = ldub_code(s->pc++);
5877 mod = (modrm >> 6) & 3;
5878 op = (modrm >> 3) & 7;
5879 switch(op) {
5880 case 0: /* sldt */
5881 if (!s->pe || s->vm86)
5882 goto illegal_op;
5883 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5884 ot = OT_WORD;
5885 if (mod == 3)
5886 ot += s->dflag;
5887 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5888 break;
5889 case 2: /* lldt */
5890 if (!s->pe || s->vm86)
5891 goto illegal_op;
5892 if (s->cpl != 0) {
5893 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5894 } else {
5895 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5896 gen_jmp_im(pc_start - s->cs_base);
5897 gen_op_lldt_T0();
5898 }
5899 break;
5900 case 1: /* str */
5901 if (!s->pe || s->vm86)
5902 goto illegal_op;
5903 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5904 ot = OT_WORD;
5905 if (mod == 3)
5906 ot += s->dflag;
5907 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5908 break;
5909 case 3: /* ltr */
5910 if (!s->pe || s->vm86)
5911 goto illegal_op;
5912 if (s->cpl != 0) {
5913 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5914 } else {
5915 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5916 gen_jmp_im(pc_start - s->cs_base);
5917 gen_op_ltr_T0();
5918 }
5919 break;
5920 case 4: /* verr */
5921 case 5: /* verw */
5922 if (!s->pe || s->vm86)
5923 goto illegal_op;
5924 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5925 if (s->cc_op != CC_OP_DYNAMIC)
5926 gen_op_set_cc_op(s->cc_op);
5927 if (op == 4)
5928 gen_op_verr();
5929 else
5930 gen_op_verw();
5931 s->cc_op = CC_OP_EFLAGS;
5932 break;
5933 default:
5934 goto illegal_op;
5935 }
5936 break;
5937 case 0x101:
5938 modrm = ldub_code(s->pc++);
5939 mod = (modrm >> 6) & 3;
5940 op = (modrm >> 3) & 7;
5941 rm = modrm & 7;
5942
5943#ifdef VBOX
5944 /* 0f 01 f9 */
5945 if (modrm == 0xf9)
5946 {
5947 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
5948 goto illegal_op;
5949 gen_jmp_im(pc_start - s->cs_base);
5950 gen_op_rdtscp();
5951 break;
5952 }
5953#endif
5954
5955 switch(op) {
5956 case 0: /* sgdt */
5957 if (mod == 3)
5958 goto illegal_op;
5959 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5960 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5961 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5962 gen_add_A0_im(s, 2);
5963 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5964 if (!s->dflag)
5965 gen_op_andl_T0_im(0xffffff);
5966 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5967 break;
5968 case 1:
5969 if (mod == 3) {
5970 switch (rm) {
5971 case 0: /* monitor */
5972 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5973 s->cpl != 0)
5974 goto illegal_op;
5975 gen_jmp_im(pc_start - s->cs_base);
5976#ifdef TARGET_X86_64
5977 if (s->aflag == 2) {
5978 gen_op_movq_A0_reg[R_EBX]();
5979 gen_op_addq_A0_AL();
5980 } else
5981#endif
5982 {
5983 gen_op_movl_A0_reg[R_EBX]();
5984 gen_op_addl_A0_AL();
5985 if (s->aflag == 0)
5986 gen_op_andl_A0_ffff();
5987 }
5988 gen_add_A0_ds_seg(s);
5989 gen_op_monitor();
5990 break;
5991 case 1: /* mwait */
5992 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5993 s->cpl != 0)
5994 goto illegal_op;
5995 if (s->cc_op != CC_OP_DYNAMIC) {
5996 gen_op_set_cc_op(s->cc_op);
5997 s->cc_op = CC_OP_DYNAMIC;
5998 }
5999 gen_jmp_im(s->pc - s->cs_base);
6000 gen_op_mwait();
6001 gen_eob(s);
6002 break;
6003 default:
6004 goto illegal_op;
6005 }
6006 } else { /* sidt */
6007 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6008 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6009 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
6010 gen_add_A0_im(s, 2);
6011 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6012 if (!s->dflag)
6013 gen_op_andl_T0_im(0xffffff);
6014 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
6015 }
6016 break;
6017 case 2: /* lgdt */
6018 case 3: /* lidt */
6019 if (mod == 3)
6020 goto illegal_op;
6021 if (s->cpl != 0) {
6022 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6023 } else {
6024 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6025 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
6026 gen_add_A0_im(s, 2);
6027 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
6028 if (!s->dflag)
6029 gen_op_andl_T0_im(0xffffff);
6030 if (op == 2) {
6031 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6032 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6033 } else {
6034 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6035 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6036 }
6037 }
6038 break;
6039 case 4: /* smsw */
6040 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6041 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6042 break;
6043 case 6: /* lmsw */
6044 if (s->cpl != 0) {
6045 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6046 } else {
6047 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6048 gen_op_lmsw_T0();
6049 gen_jmp_im(s->pc - s->cs_base);
6050 gen_eob(s);
6051 }
6052 break;
6053 case 7: /* invlpg */
6054 if (s->cpl != 0) {
6055 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6056 } else {
6057 if (mod == 3) {
6058#ifdef TARGET_X86_64
6059 if (CODE64(s) && rm == 0) {
6060 /* swapgs */
6061 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6062 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6063 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6064 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6065 } else
6066#endif
6067 {
6068 goto illegal_op;
6069 }
6070 } else {
6071 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6072 gen_op_invlpg_A0();
6073 gen_jmp_im(s->pc - s->cs_base);
6074 gen_eob(s);
6075 }
6076 }
6077 break;
6078 default:
6079 goto illegal_op;
6080 }
6081 break;
6082 case 0x108: /* invd */
6083 case 0x109: /* wbinvd */
6084 if (s->cpl != 0) {
6085 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6086 } else {
6087 /* nothing to do */
6088 }
6089 break;
6090 case 0x63: /* arpl or movslS (x86_64) */
6091#ifdef TARGET_X86_64
6092 if (CODE64(s)) {
6093 int d_ot;
6094 /* d_ot is the size of destination */
6095 d_ot = dflag + OT_WORD;
6096
6097 modrm = ldub_code(s->pc++);
6098 reg = ((modrm >> 3) & 7) | rex_r;
6099 mod = (modrm >> 6) & 3;
6100 rm = (modrm & 7) | REX_B(s);
6101
6102 if (mod == 3) {
6103 gen_op_mov_TN_reg[OT_LONG][0][rm]();
6104 /* sign extend */
6105 if (d_ot == OT_QUAD)
6106 gen_op_movslq_T0_T0();
6107 gen_op_mov_reg_T0[d_ot][reg]();
6108 } else {
6109 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6110 if (d_ot == OT_QUAD) {
6111 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
6112 } else {
6113 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6114 }
6115 gen_op_mov_reg_T0[d_ot][reg]();
6116 }
6117 } else
6118#endif
6119 {
6120 if (!s->pe || s->vm86)
6121 goto illegal_op;
6122 ot = dflag ? OT_LONG : OT_WORD;
6123 modrm = ldub_code(s->pc++);
6124 reg = (modrm >> 3) & 7;
6125 mod = (modrm >> 6) & 3;
6126 rm = modrm & 7;
6127#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
6128 gen_op_mov_TN_reg[ot][1][reg]();
6129#endif
6130 if (mod != 3) {
6131 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6132 gen_op_ld_T0_A0[ot + s->mem_index]();
6133 } else {
6134 gen_op_mov_TN_reg[ot][0][rm]();
6135 }
6136 if (s->cc_op != CC_OP_DYNAMIC)
6137 gen_op_set_cc_op(s->cc_op);
6138 gen_op_arpl();
6139 s->cc_op = CC_OP_EFLAGS;
6140 if (mod != 3) {
6141 gen_op_st_T0_A0[ot + s->mem_index]();
6142 } else {
6143 gen_op_mov_reg_T0[ot][rm]();
6144 }
6145 gen_op_arpl_update();
6146 }
6147 break;
6148 case 0x102: /* lar */
6149 case 0x103: /* lsl */
6150 if (!s->pe || s->vm86)
6151 goto illegal_op;
6152 ot = dflag ? OT_LONG : OT_WORD;
6153 modrm = ldub_code(s->pc++);
6154 reg = ((modrm >> 3) & 7) | rex_r;
6155 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6156 gen_op_mov_TN_reg[ot][1][reg]();
6157 if (s->cc_op != CC_OP_DYNAMIC)
6158 gen_op_set_cc_op(s->cc_op);
6159 if (b == 0x102)
6160 gen_op_lar();
6161 else
6162 gen_op_lsl();
6163 s->cc_op = CC_OP_EFLAGS;
6164 gen_op_mov_reg_T1[ot][reg]();
6165 break;
6166 case 0x118:
6167 modrm = ldub_code(s->pc++);
6168 mod = (modrm >> 6) & 3;
6169 op = (modrm >> 3) & 7;
6170 switch(op) {
6171 case 0: /* prefetchnta */
6172            case 1: /* prefetcht0 */
6173            case 2: /* prefetcht1 */
6174            case 3: /* prefetcht2 */
6175 if (mod == 3)
6176 goto illegal_op;
6177 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6178 /* nothing more to do */
6179 break;
6180 default: /* nop (multi byte) */
6181 gen_nop_modrm(s, modrm);
6182 break;
6183 }
6184 break;
6185 case 0x119 ... 0x11f: /* nop (multi byte) */
6186 modrm = ldub_code(s->pc++);
6187 gen_nop_modrm(s, modrm);
6188 break;
6189 case 0x120: /* mov reg, crN */
6190 case 0x122: /* mov crN, reg */
6191 if (s->cpl != 0) {
6192 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6193 } else {
6194 modrm = ldub_code(s->pc++);
6195 if ((modrm & 0xc0) != 0xc0)
6196 goto illegal_op;
6197 rm = (modrm & 7) | REX_B(s);
6198 reg = ((modrm >> 3) & 7) | rex_r;
6199 if (CODE64(s))
6200 ot = OT_QUAD;
6201 else
6202 ot = OT_LONG;
6203 switch(reg) {
6204 case 0:
6205 case 2:
6206 case 3:
6207 case 4:
6208 case 8:
6209 if (b & 2) {
6210 gen_op_mov_TN_reg[ot][0][rm]();
6211 gen_op_movl_crN_T0(reg);
6212 gen_jmp_im(s->pc - s->cs_base);
6213 gen_eob(s);
6214 } else {
6215#if !defined(CONFIG_USER_ONLY)
6216 if (reg == 8)
6217 gen_op_movtl_T0_cr8();
6218 else
6219#endif
6220 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6221 gen_op_mov_reg_T0[ot][rm]();
6222 }
6223 break;
6224 default:
6225 goto illegal_op;
6226 }
6227 }
6228 break;
6229 case 0x121: /* mov reg, drN */
6230 case 0x123: /* mov drN, reg */
6231 if (s->cpl != 0) {
6232 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6233 } else {
6234 modrm = ldub_code(s->pc++);
6235 if ((modrm & 0xc0) != 0xc0)
6236 goto illegal_op;
6237 rm = (modrm & 7) | REX_B(s);
6238 reg = ((modrm >> 3) & 7) | rex_r;
6239 if (CODE64(s))
6240 ot = OT_QUAD;
6241 else
6242 ot = OT_LONG;
6243 /* XXX: do it dynamically with CR4.DE bit */
6244 if (reg == 4 || reg == 5 || reg >= 8)
6245 goto illegal_op;
6246 if (b & 2) {
6247 gen_op_mov_TN_reg[ot][0][rm]();
6248 gen_op_movl_drN_T0(reg);
6249 gen_jmp_im(s->pc - s->cs_base);
6250 gen_eob(s);
6251 } else {
6252 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6253 gen_op_mov_reg_T0[ot][rm]();
6254 }
6255 }
6256 break;
6257 case 0x106: /* clts */
6258 if (s->cpl != 0) {
6259 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6260 } else {
6261 gen_op_clts();
6262 /* abort block because static cpu state changed */
6263 gen_jmp_im(s->pc - s->cs_base);
6264 gen_eob(s);
6265 }
6266 break;
6267 /* MMX/SSE/SSE2/PNI support */
6268 case 0x1c3: /* MOVNTI reg, mem */
6269 if (!(s->cpuid_features & CPUID_SSE2))
6270 goto illegal_op;
6271 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6272 modrm = ldub_code(s->pc++);
6273 mod = (modrm >> 6) & 3;
6274 if (mod == 3)
6275 goto illegal_op;
6276 reg = ((modrm >> 3) & 7) | rex_r;
6277 /* generate a generic store */
6278 gen_ldst_modrm(s, modrm, ot, reg, 1);
6279 break;
6280 case 0x1ae:
6281 modrm = ldub_code(s->pc++);
6282 mod = (modrm >> 6) & 3;
6283 op = (modrm >> 3) & 7;
6284 switch(op) {
6285 case 0: /* fxsave */
6286 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6287 (s->flags & HF_EM_MASK))
6288 goto illegal_op;
6289 if (s->flags & HF_TS_MASK) {
6290 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6291 break;
6292 }
6293 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6294 gen_op_fxsave_A0((s->dflag == 2));
6295 break;
6296 case 1: /* fxrstor */
6297 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6298 (s->flags & HF_EM_MASK))
6299 goto illegal_op;
6300 if (s->flags & HF_TS_MASK) {
6301 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6302 break;
6303 }
6304 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6305 gen_op_fxrstor_A0((s->dflag == 2));
6306 break;
6307 case 2: /* ldmxcsr */
6308 case 3: /* stmxcsr */
6309 if (s->flags & HF_TS_MASK) {
6310 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6311 break;
6312 }
6313 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6314 mod == 3)
6315 goto illegal_op;
6316 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6317 if (op == 2) {
6318 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6319 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6320 } else {
6321 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6322 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6323 }
6324 break;
6325 case 5: /* lfence */
6326 case 6: /* mfence */
6327 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6328 goto illegal_op;
6329 break;
6330 case 7: /* sfence / clflush */
6331 if ((modrm & 0xc7) == 0xc0) {
6332 /* sfence */
6333 if (!(s->cpuid_features & CPUID_SSE))
6334 goto illegal_op;
6335 } else {
6336 /* clflush */
6337 if (!(s->cpuid_features & CPUID_CLFLUSH))
6338 goto illegal_op;
6339 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6340 }
6341 break;
6342 default:
6343 goto illegal_op;
6344 }
6345 break;
6346 case 0x10d: /* prefetch */
6347 modrm = ldub_code(s->pc++);
6348 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6349 /* ignore for now */
6350 break;
6351 case 0x1aa: /* rsm */
6352 if (!(s->flags & HF_SMM_MASK))
6353 goto illegal_op;
6354 if (s->cc_op != CC_OP_DYNAMIC) {
6355 gen_op_set_cc_op(s->cc_op);
6356 s->cc_op = CC_OP_DYNAMIC;
6357 }
6358 gen_jmp_im(s->pc - s->cs_base);
6359 gen_op_rsm();
6360 gen_eob(s);
6361 break;
6362 case 0x110 ... 0x117:
6363 case 0x128 ... 0x12f:
6364 case 0x150 ... 0x177:
6365 case 0x17c ... 0x17f:
6366 case 0x1c2:
6367 case 0x1c4 ... 0x1c6:
6368 case 0x1d0 ... 0x1fe:
6369 gen_sse(s, b, pc_start, rex_r);
6370 break;
6371 default:
6372 goto illegal_op;
6373 }
6374 /* lock generation */
6375 if (s->prefix & PREFIX_LOCK)
6376 gen_op_unlock();
6377 return s->pc;
6378 illegal_op:
6379 if (s->prefix & PREFIX_LOCK)
6380 gen_op_unlock();
6381 /* XXX: ensure that no lock was generated */
6382 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6383 return s->pc;
6384}
6385
6386#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6387#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6388
/* flags read by an operation.
   Indexed by INDEX_op_*; entries not listed default to 0 (the op
   consumes no EFLAGS bits).  Used by optimize_flags() in its backward
   liveness pass to decide which flags are live before each op. */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps fused with a preceding sub: each form reads
       only the flag bits its condition tests */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rotate-through-carry variants consume the incoming
   carry; SUFFIX selects the memory-access flavour of the op
   (register-only, _raw, _kernel, _user). */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6521
/* flags written by an operation.
   Indexed by INDEX_op_*; entries not listed default to 0 (the op
   leaves EFLAGS untouched).  optimize_flags() uses this to kill
   liveness of the written bits and to decide when the op can be
   replaced by its flag-less variant from opc_simpler[]. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* explicit EFLAGS loads (popf/sahf style ops) */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Flag-writing ops that exist in memory-access flavours; SUFFIX is
   the access variant (register-only, _raw, _kernel, _user). */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6672
/* simpler form of an operation if no flags need to be generated.
   Maps a flag-producing op to an equivalent op that skips the flag
   computation; entries left at 0 are identity-mapped by
   optimize_flags_init().  optimize_flags() substitutes the simpler
   op whenever all flags the original would write are dead. */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,

/* rol/ror exist in memory-access flavours; SUFFIX selects the
   variant (register-only, _raw, _kernel, _user). */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6716
6717void optimize_flags_init(void)
6718{
6719 int i;
6720 /* put default values in arrays */
6721 for(i = 0; i < NB_OPS; i++) {
6722 if (opc_simpler[i] == 0)
6723 opc_simpler[i] = i;
6724 }
6725}
6726
6727/* CPU flags computation optimization: we move backward thru the
6728 generated code to see which flags are needed. The operation is
6729 modified if suitable */
6730static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6731{
6732 uint16_t *opc_ptr;
6733 int live_flags, write_flags, op;
6734
6735 opc_ptr = opc_buf + opc_buf_len;
6736 /* live_flags contains the flags needed by the next instructions
6737 in the code. At the end of the bloc, we consider that all the
6738 flags are live. */
6739 live_flags = CC_OSZAPC;
6740 while (opc_ptr > opc_buf) {
6741 op = *--opc_ptr;
6742 /* if none of the flags written by the instruction is used,
6743 then we can try to find a simpler instruction */
6744 write_flags = opc_write_flags[op];
6745 if ((live_flags & write_flags) == 0) {
6746 *opc_ptr = opc_simpler[op];
6747 }
6748 /* compute the live flags before the instruction */
6749 live_flags &= ~write_flags;
6750 live_flags |= opc_read_flags[op];
6751 }
6752}
6753
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information (gen_opc_pc/gen_opc_cc_op/gen_opc_instr_start) for each
   intermediate instruction, so the guest PC can be recovered from a
   host fault address.  Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the static CPU state captured in tb->flags into the
       disassembly context used by disas_insn(). */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX
    /* VBox: track CR4.VME so v86-mode IF handling can be emulated. */
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
#ifdef VBOX_WITH_CALL_RECORD
    /* NOTE(review): records guest calls only for paged, 32-bit,
       interrupts-disabled, non-raw-ring0 code — presumably the cases
       VBox's call-recording patching cares about; confirm upstream. */
    if (    !(env->state & CPU_RAW_RING0)
        && (env->cr[0] & CR0_PG_MASK)
        && !(env->eflags & X86_EFL_IF)
        && dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions (index into the ld/st op tables;
       cf. the _raw/_kernel/_user op suffixes) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* Direct block chaining is only safe when we are not single
       stepping and no IRQ inhibition is pending. */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug trap if a breakpoint is set at this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for every emitted micro-op,
               zero-filling instr_start for ops of the previous insn */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VBox: when asked to emulate exactly one instruction, end the
           block here (the request flag is one-shot). */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6949
/* Translate the basic block 'tb' into micro-ops, without per-op PC
   tracking (the normal translation path).  Always returns 0. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
6954
/* As gen_intermediate_code(), but additionally records per-micro-op
   guest PC / cc_op data (search_pc mode) so precise CPU state can be
   restored, e.g. after a fault inside the translated block. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
6959
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette