VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 9380

Last change on this file since 9380 was 9015, checked in by vboxsync, 17 years ago

Removed `#if 1`; the code appears to work fine without it.

  • Property svn:eol-style set to native
File size: 205.3 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#ifndef VBOX
26#include <signal.h>
27#include <assert.h>
28#endif /* !VBOX */
29
30#include "cpu.h"
31#include "exec-all.h"
32#include "disas.h"
33
34/* XXX: move that elsewhere */
35static uint16_t *gen_opc_ptr;
36static uint32_t *gen_opparam_ptr;
37
38#define PREFIX_REPZ 0x01
39#define PREFIX_REPNZ 0x02
40#define PREFIX_LOCK 0x04
41#define PREFIX_DATA 0x08
42#define PREFIX_ADR 0x10
43
44#ifdef TARGET_X86_64
45#define X86_64_ONLY(x) x
46#define X86_64_DEF(x...) x
47#define CODE64(s) ((s)->code64)
48#define REX_X(s) ((s)->rex_x)
49#define REX_B(s) ((s)->rex_b)
50/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
51#if 1
52#define BUGGY_64(x) NULL
53#endif
54#else
55#define X86_64_ONLY(x) NULL
56#define X86_64_DEF(x...)
57#define CODE64(s) 0
58#define REX_X(s) 0
59#define REX_B(s) 0
60#endif
61
62#ifdef TARGET_X86_64
63static int x86_64_hregs;
64#endif
65
66#ifdef USE_DIRECT_JUMP
67#define TBPARAM(x)
68#else
69#define TBPARAM(x) (long)(x)
70#endif
71
72#ifdef VBOX
73/* Special/override code readers to hide patched code. */
74
75uint8_t ldub_code_raw(target_ulong pc)
76{
77 uint8_t b;
78
79 if (!remR3GetOpcode(cpu_single_env, pc, &b))
80 b = ldub_code(pc);
81 return b;
82}
83#define ldub_code(a) ldub_code_raw(a)
84
85uint16_t lduw_code_raw(target_ulong pc)
86{
87 return (ldub_code(pc+1) << 8) | ldub_code(pc);
88}
89#define lduw_code(a) lduw_code_raw(a)
90
91
92uint32_t ldl_code_raw(target_ulong pc)
93{
94 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
95}
96#define ldl_code(a) ldl_code_raw(a)
97
98#endif /* VBOX */
99
100
/* Per-instruction and per-block state used while translating one
   translation block of guest i386 code into micro-ops. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index; -1 if no override */
    int prefix;   /* PREFIX_* bit mask of prefixes seen on the current insn */
    int aflag, dflag; /* effective address size / operand size of the insn */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base register extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (CC_OP_*) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (raw/kernel/user slices) */
    int flags;  /* all execution flags */
    struct TranslationBlock *tb; /* translation block being generated */
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* cached CPUID feature bits */
    int cpuid_ext_features; /* cached CPUID extended feature bits */
} DisasContext;
140
141static void gen_eob(DisasContext *s);
142static void gen_jmp(DisasContext *s, target_ulong eip);
143static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
144
145/* i386 arith/logic operations */
146enum {
147 OP_ADDL,
148 OP_ORL,
149 OP_ADCL,
150 OP_SBBL,
151 OP_ANDL,
152 OP_SUBL,
153 OP_XORL,
154 OP_CMPL,
155};
156
157/* i386 shift ops */
158enum {
159 OP_ROL,
160 OP_ROR,
161 OP_RCL,
162 OP_RCR,
163 OP_SHL,
164 OP_SHR,
165 OP_SHL1, /* undocumented */
166 OP_SAR = 7,
167};
168
169enum {
170#define DEF(s, n, copy_size) INDEX_op_ ## s,
171#include "opc.h"
172#undef DEF
173 NB_OPS,
174};
175
176#include "gen-op.h"
177
178/* operand size */
179enum {
180 OT_BYTE = 0,
181 OT_WORD,
182 OT_LONG,
183 OT_QUAD,
184};
185
186enum {
187 /* I386 int registers */
188 OR_EAX, /* MUST be even numbered */
189 OR_ECX,
190 OR_EDX,
191 OR_EBX,
192 OR_ESP,
193 OR_EBP,
194 OR_ESI,
195 OR_EDI,
196
197 OR_TMP0 = 16, /* temporary operand register */
198 OR_TMP1,
199 OR_A0, /* temporary register used when doing address evaluation */
200};
201
202#ifdef TARGET_X86_64
203
204#define NB_OP_SIZES 4
205
206#define DEF_REGS(prefix, suffix) \
207 prefix ## EAX ## suffix,\
208 prefix ## ECX ## suffix,\
209 prefix ## EDX ## suffix,\
210 prefix ## EBX ## suffix,\
211 prefix ## ESP ## suffix,\
212 prefix ## EBP ## suffix,\
213 prefix ## ESI ## suffix,\
214 prefix ## EDI ## suffix,\
215 prefix ## R8 ## suffix,\
216 prefix ## R9 ## suffix,\
217 prefix ## R10 ## suffix,\
218 prefix ## R11 ## suffix,\
219 prefix ## R12 ## suffix,\
220 prefix ## R13 ## suffix,\
221 prefix ## R14 ## suffix,\
222 prefix ## R15 ## suffix,
223
224#define DEF_BREGS(prefixb, prefixh, suffix) \
225 \
226static void prefixb ## ESP ## suffix ## _wrapper(void) \
227{ \
228 if (x86_64_hregs) \
229 prefixb ## ESP ## suffix (); \
230 else \
231 prefixh ## EAX ## suffix (); \
232} \
233 \
234static void prefixb ## EBP ## suffix ## _wrapper(void) \
235{ \
236 if (x86_64_hregs) \
237 prefixb ## EBP ## suffix (); \
238 else \
239 prefixh ## ECX ## suffix (); \
240} \
241 \
242static void prefixb ## ESI ## suffix ## _wrapper(void) \
243{ \
244 if (x86_64_hregs) \
245 prefixb ## ESI ## suffix (); \
246 else \
247 prefixh ## EDX ## suffix (); \
248} \
249 \
250static void prefixb ## EDI ## suffix ## _wrapper(void) \
251{ \
252 if (x86_64_hregs) \
253 prefixb ## EDI ## suffix (); \
254 else \
255 prefixh ## EBX ## suffix (); \
256}
257
258DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
259DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
260DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
261DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
262
263#else /* !TARGET_X86_64 */
264
265#define NB_OP_SIZES 3
266
267#define DEF_REGS(prefix, suffix) \
268 prefix ## EAX ## suffix,\
269 prefix ## ECX ## suffix,\
270 prefix ## EDX ## suffix,\
271 prefix ## EBX ## suffix,\
272 prefix ## ESP ## suffix,\
273 prefix ## EBP ## suffix,\
274 prefix ## ESI ## suffix,\
275 prefix ## EDI ## suffix,
276
277#endif /* !TARGET_X86_64 */
278
279static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
280 [OT_BYTE] = {
281 gen_op_movb_EAX_T0,
282 gen_op_movb_ECX_T0,
283 gen_op_movb_EDX_T0,
284 gen_op_movb_EBX_T0,
285#ifdef TARGET_X86_64
286 gen_op_movb_ESP_T0_wrapper,
287 gen_op_movb_EBP_T0_wrapper,
288 gen_op_movb_ESI_T0_wrapper,
289 gen_op_movb_EDI_T0_wrapper,
290 gen_op_movb_R8_T0,
291 gen_op_movb_R9_T0,
292 gen_op_movb_R10_T0,
293 gen_op_movb_R11_T0,
294 gen_op_movb_R12_T0,
295 gen_op_movb_R13_T0,
296 gen_op_movb_R14_T0,
297 gen_op_movb_R15_T0,
298#else
299 gen_op_movh_EAX_T0,
300 gen_op_movh_ECX_T0,
301 gen_op_movh_EDX_T0,
302 gen_op_movh_EBX_T0,
303#endif
304 },
305 [OT_WORD] = {
306 DEF_REGS(gen_op_movw_, _T0)
307 },
308 [OT_LONG] = {
309 DEF_REGS(gen_op_movl_, _T0)
310 },
311#ifdef TARGET_X86_64
312 [OT_QUAD] = {
313 DEF_REGS(gen_op_movq_, _T0)
314 },
315#endif
316};
317
318static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
319 [OT_BYTE] = {
320 gen_op_movb_EAX_T1,
321 gen_op_movb_ECX_T1,
322 gen_op_movb_EDX_T1,
323 gen_op_movb_EBX_T1,
324#ifdef TARGET_X86_64
325 gen_op_movb_ESP_T1_wrapper,
326 gen_op_movb_EBP_T1_wrapper,
327 gen_op_movb_ESI_T1_wrapper,
328 gen_op_movb_EDI_T1_wrapper,
329 gen_op_movb_R8_T1,
330 gen_op_movb_R9_T1,
331 gen_op_movb_R10_T1,
332 gen_op_movb_R11_T1,
333 gen_op_movb_R12_T1,
334 gen_op_movb_R13_T1,
335 gen_op_movb_R14_T1,
336 gen_op_movb_R15_T1,
337#else
338 gen_op_movh_EAX_T1,
339 gen_op_movh_ECX_T1,
340 gen_op_movh_EDX_T1,
341 gen_op_movh_EBX_T1,
342#endif
343 },
344 [OT_WORD] = {
345 DEF_REGS(gen_op_movw_, _T1)
346 },
347 [OT_LONG] = {
348 DEF_REGS(gen_op_movl_, _T1)
349 },
350#ifdef TARGET_X86_64
351 [OT_QUAD] = {
352 DEF_REGS(gen_op_movq_, _T1)
353 },
354#endif
355};
356
357static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
358 [0] = {
359 DEF_REGS(gen_op_movw_, _A0)
360 },
361 [1] = {
362 DEF_REGS(gen_op_movl_, _A0)
363 },
364#ifdef TARGET_X86_64
365 [2] = {
366 DEF_REGS(gen_op_movq_, _A0)
367 },
368#endif
369};
370
371static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
372{
373 [OT_BYTE] = {
374 {
375 gen_op_movl_T0_EAX,
376 gen_op_movl_T0_ECX,
377 gen_op_movl_T0_EDX,
378 gen_op_movl_T0_EBX,
379#ifdef TARGET_X86_64
380 gen_op_movl_T0_ESP_wrapper,
381 gen_op_movl_T0_EBP_wrapper,
382 gen_op_movl_T0_ESI_wrapper,
383 gen_op_movl_T0_EDI_wrapper,
384 gen_op_movl_T0_R8,
385 gen_op_movl_T0_R9,
386 gen_op_movl_T0_R10,
387 gen_op_movl_T0_R11,
388 gen_op_movl_T0_R12,
389 gen_op_movl_T0_R13,
390 gen_op_movl_T0_R14,
391 gen_op_movl_T0_R15,
392#else
393 gen_op_movh_T0_EAX,
394 gen_op_movh_T0_ECX,
395 gen_op_movh_T0_EDX,
396 gen_op_movh_T0_EBX,
397#endif
398 },
399 {
400 gen_op_movl_T1_EAX,
401 gen_op_movl_T1_ECX,
402 gen_op_movl_T1_EDX,
403 gen_op_movl_T1_EBX,
404#ifdef TARGET_X86_64
405 gen_op_movl_T1_ESP_wrapper,
406 gen_op_movl_T1_EBP_wrapper,
407 gen_op_movl_T1_ESI_wrapper,
408 gen_op_movl_T1_EDI_wrapper,
409 gen_op_movl_T1_R8,
410 gen_op_movl_T1_R9,
411 gen_op_movl_T1_R10,
412 gen_op_movl_T1_R11,
413 gen_op_movl_T1_R12,
414 gen_op_movl_T1_R13,
415 gen_op_movl_T1_R14,
416 gen_op_movl_T1_R15,
417#else
418 gen_op_movh_T1_EAX,
419 gen_op_movh_T1_ECX,
420 gen_op_movh_T1_EDX,
421 gen_op_movh_T1_EBX,
422#endif
423 },
424 },
425 [OT_WORD] = {
426 {
427 DEF_REGS(gen_op_movl_T0_, )
428 },
429 {
430 DEF_REGS(gen_op_movl_T1_, )
431 },
432 },
433 [OT_LONG] = {
434 {
435 DEF_REGS(gen_op_movl_T0_, )
436 },
437 {
438 DEF_REGS(gen_op_movl_T1_, )
439 },
440 },
441#ifdef TARGET_X86_64
442 [OT_QUAD] = {
443 {
444 DEF_REGS(gen_op_movl_T0_, )
445 },
446 {
447 DEF_REGS(gen_op_movl_T1_, )
448 },
449 },
450#endif
451};
452
453static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
454 DEF_REGS(gen_op_movl_A0_, )
455};
456
457static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
458 [0] = {
459 DEF_REGS(gen_op_addl_A0_, )
460 },
461 [1] = {
462 DEF_REGS(gen_op_addl_A0_, _s1)
463 },
464 [2] = {
465 DEF_REGS(gen_op_addl_A0_, _s2)
466 },
467 [3] = {
468 DEF_REGS(gen_op_addl_A0_, _s3)
469 },
470};
471
472#ifdef TARGET_X86_64
473static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
474 DEF_REGS(gen_op_movq_A0_, )
475};
476
477static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
478 [0] = {
479 DEF_REGS(gen_op_addq_A0_, )
480 },
481 [1] = {
482 DEF_REGS(gen_op_addq_A0_, _s1)
483 },
484 [2] = {
485 DEF_REGS(gen_op_addq_A0_, _s2)
486 },
487 [3] = {
488 DEF_REGS(gen_op_addq_A0_, _s3)
489 },
490};
491#endif
492
493static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
494 [0] = {
495 DEF_REGS(gen_op_cmovw_, _T1_T0)
496 },
497 [1] = {
498 DEF_REGS(gen_op_cmovl_, _T1_T0)
499 },
500#ifdef TARGET_X86_64
501 [2] = {
502 DEF_REGS(gen_op_cmovq_, _T1_T0)
503 },
504#endif
505};
506
/* Simple logic ops indexed by OP_*.  Only OR/AND/XOR are table driven;
   the NULL slots (ADD/ADC/SBB/SUB/CMP) are handled by dedicated code
   paths in gen_op() below. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};
517
518#define DEF_ARITHC(SUFFIX)\
519 {\
520 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
521 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
522 },\
523 {\
524 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
525 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
526 },\
527 {\
528 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
529 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
530 },\
531 {\
532 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
533 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
534 },
535
536static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
537 DEF_ARITHC( )
538};
539
540static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
541 DEF_ARITHC(_raw)
542#ifndef CONFIG_USER_ONLY
543 DEF_ARITHC(_kernel)
544 DEF_ARITHC(_user)
545#endif
546};
547
/* Byte-sized CC_OP_* code produced by each OP_* arith operation;
   presumably widened by adding the operand size, matching the
   "CC_OP_SUBB + ot" pattern used in gen_op() — TODO confirm. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,   /* OP_ADDL */
    CC_OP_LOGICB, /* OP_ORL */
    CC_OP_ADDB,   /* OP_ADCL */
    CC_OP_SUBB,   /* OP_SBBL */
    CC_OP_LOGICB, /* OP_ANDL */
    CC_OP_SUBB,   /* OP_SUBL */
    CC_OP_LOGICB, /* OP_XORL */
    CC_OP_SUBB,   /* OP_CMPL */
};
558
559#define DEF_CMPXCHG(SUFFIX)\
560 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
561 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
562 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
563 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
564
565static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
566 DEF_CMPXCHG( )
567};
568
569static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
570 DEF_CMPXCHG(_raw)
571#ifndef CONFIG_USER_ONLY
572 DEF_CMPXCHG(_kernel)
573 DEF_CMPXCHG(_user)
574#endif
575};
576
577#define DEF_SHIFT(SUFFIX)\
578 {\
579 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
580 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
581 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
582 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
583 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
584 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
585 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
586 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
587 },\
588 {\
589 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
590 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
591 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
592 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
593 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
594 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
595 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
596 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
597 },\
598 {\
599 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
600 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
601 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
602 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
603 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
604 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
605 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
606 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
607 },\
608 {\
609 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
610 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
611 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
612 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
613 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
614 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
615 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
616 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
617 },
618
619static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
620 DEF_SHIFT( )
621};
622
623static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
624 DEF_SHIFT(_raw)
625#ifndef CONFIG_USER_ONLY
626 DEF_SHIFT(_kernel)
627 DEF_SHIFT(_user)
628#endif
629};
630
631#define DEF_SHIFTD(SUFFIX, op)\
632 {\
633 NULL,\
634 NULL,\
635 },\
636 {\
637 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
638 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
639 },\
640 {\
641 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
642 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
643 },\
644 {\
645X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
646 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
647 },
648
649static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
650 DEF_SHIFTD(, im)
651};
652
653static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
654 DEF_SHIFTD(, ECX)
655};
656
657static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
658 DEF_SHIFTD(_raw, im)
659#ifndef CONFIG_USER_ONLY
660 DEF_SHIFTD(_kernel, im)
661 DEF_SHIFTD(_user, im)
662#endif
663};
664
665static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
666 DEF_SHIFTD(_raw, ECX)
667#ifndef CONFIG_USER_ONLY
668 DEF_SHIFTD(_kernel, ECX)
669 DEF_SHIFTD(_user, ECX)
670#endif
671};
672
673static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
674 [0] = {
675 gen_op_btw_T0_T1_cc,
676 gen_op_btsw_T0_T1_cc,
677 gen_op_btrw_T0_T1_cc,
678 gen_op_btcw_T0_T1_cc,
679 },
680 [1] = {
681 gen_op_btl_T0_T1_cc,
682 gen_op_btsl_T0_T1_cc,
683 gen_op_btrl_T0_T1_cc,
684 gen_op_btcl_T0_T1_cc,
685 },
686#ifdef TARGET_X86_64
687 [2] = {
688 gen_op_btq_T0_T1_cc,
689 gen_op_btsq_T0_T1_cc,
690 gen_op_btrq_T0_T1_cc,
691 gen_op_btcq_T0_T1_cc,
692 },
693#endif
694};
695
696static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
697 gen_op_add_bitw_A0_T1,
698 gen_op_add_bitl_A0_T1,
699 X86_64_ONLY(gen_op_add_bitq_A0_T1),
700};
701
702static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
703 [0] = {
704 gen_op_bsfw_T0_cc,
705 gen_op_bsrw_T0_cc,
706 },
707 [1] = {
708 gen_op_bsfl_T0_cc,
709 gen_op_bsrl_T0_cc,
710 },
711#ifdef TARGET_X86_64
712 [2] = {
713 gen_op_bsfq_T0_cc,
714 gen_op_bsrq_T0_cc,
715 },
716#endif
717};
718
719static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
720 gen_op_ldsb_raw_T0_A0,
721 gen_op_ldsw_raw_T0_A0,
722 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
723 NULL,
724#ifndef CONFIG_USER_ONLY
725 gen_op_ldsb_kernel_T0_A0,
726 gen_op_ldsw_kernel_T0_A0,
727 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
728 NULL,
729
730 gen_op_ldsb_user_T0_A0,
731 gen_op_ldsw_user_T0_A0,
732 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
733 NULL,
734#endif
735};
736
737static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
738 gen_op_ldub_raw_T0_A0,
739 gen_op_lduw_raw_T0_A0,
740 NULL,
741 NULL,
742
743#ifndef CONFIG_USER_ONLY
744 gen_op_ldub_kernel_T0_A0,
745 gen_op_lduw_kernel_T0_A0,
746 NULL,
747 NULL,
748
749 gen_op_ldub_user_T0_A0,
750 gen_op_lduw_user_T0_A0,
751 NULL,
752 NULL,
753#endif
754};
755
756/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
757static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
758 gen_op_ldub_raw_T0_A0,
759 gen_op_lduw_raw_T0_A0,
760 gen_op_ldl_raw_T0_A0,
761 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
762
763#ifndef CONFIG_USER_ONLY
764 gen_op_ldub_kernel_T0_A0,
765 gen_op_lduw_kernel_T0_A0,
766 gen_op_ldl_kernel_T0_A0,
767 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
768
769 gen_op_ldub_user_T0_A0,
770 gen_op_lduw_user_T0_A0,
771 gen_op_ldl_user_T0_A0,
772 X86_64_ONLY(gen_op_ldq_user_T0_A0),
773#endif
774};
775
776static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
777 gen_op_ldub_raw_T1_A0,
778 gen_op_lduw_raw_T1_A0,
779 gen_op_ldl_raw_T1_A0,
780 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
781
782#ifndef CONFIG_USER_ONLY
783 gen_op_ldub_kernel_T1_A0,
784 gen_op_lduw_kernel_T1_A0,
785 gen_op_ldl_kernel_T1_A0,
786 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
787
788 gen_op_ldub_user_T1_A0,
789 gen_op_lduw_user_T1_A0,
790 gen_op_ldl_user_T1_A0,
791 X86_64_ONLY(gen_op_ldq_user_T1_A0),
792#endif
793};
794
795static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
796 gen_op_stb_raw_T0_A0,
797 gen_op_stw_raw_T0_A0,
798 gen_op_stl_raw_T0_A0,
799 X86_64_ONLY(gen_op_stq_raw_T0_A0),
800
801#ifndef CONFIG_USER_ONLY
802 gen_op_stb_kernel_T0_A0,
803 gen_op_stw_kernel_T0_A0,
804 gen_op_stl_kernel_T0_A0,
805 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
806
807 gen_op_stb_user_T0_A0,
808 gen_op_stw_user_T0_A0,
809 gen_op_stl_user_T0_A0,
810 X86_64_ONLY(gen_op_stq_user_T0_A0),
811#endif
812};
813
814static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
815 NULL,
816 gen_op_stw_raw_T1_A0,
817 gen_op_stl_raw_T1_A0,
818 X86_64_ONLY(gen_op_stq_raw_T1_A0),
819
820#ifndef CONFIG_USER_ONLY
821 NULL,
822 gen_op_stw_kernel_T1_A0,
823 gen_op_stl_kernel_T1_A0,
824 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
825
826 NULL,
827 gen_op_stw_user_T1_A0,
828 gen_op_stl_user_T1_A0,
829 X86_64_ONLY(gen_op_stq_user_T1_A0),
830#endif
831};
832
833#ifdef VBOX
/* Emit an op that checks for pending external events so translated
   code returns control to VBox in a timely manner.
   Note: the parameter list is (void) — an empty () in C declares a
   function with unspecified parameters, which defeats type checking. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
838
/* Emit code that loads 'pc' into EIP, choosing the most compact
   immediate form available.  VBox-only helper; unlike gen_jmp_im()
   it does not emit an external-event check. */
static inline void gen_update_eip(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value fits as a sign-extended 32-bit immediate */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
853
854#endif /* VBOX */
855
/* Emit code that loads the immediate 'pc' into EIP, using the most
   compact immediate form.  On VBox builds this also emits a check for
   pending external events first. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value fits as a sign-extended 32-bit immediate */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
873
/* Load A0 with the effective source address for string instructions:
   segment base (honouring any segment override; DS by default) plus
   ESI, truncated according to the current address size. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only applied when overridden */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;    /* only add the base when it may be non-zero */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 bit address: mask ESI to 16 bits and always add the base */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
908
/* Load A0 with the effective destination address for string
   instructions: ES base plus EDI (string destinations always use ES,
   no override), truncated according to the current address size. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: no segment base applied */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        /* 32 bit address: only add the ES base when it may be non-zero */
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16 bit address: mask EDI to 16 bits and always add the ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
929
930static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
931 gen_op_movl_T0_Dshiftb,
932 gen_op_movl_T0_Dshiftw,
933 gen_op_movl_T0_Dshiftl,
934 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
935};
936
937static GenOpFunc1 *gen_op_jnz_ecx[3] = {
938 gen_op_jnz_ecxw,
939 gen_op_jnz_ecxl,
940 X86_64_ONLY(gen_op_jnz_ecxq),
941};
942
943static GenOpFunc1 *gen_op_jz_ecx[3] = {
944 gen_op_jz_ecxw,
945 gen_op_jz_ecxl,
946 X86_64_ONLY(gen_op_jz_ecxq),
947};
948
949static GenOpFunc *gen_op_dec_ECX[3] = {
950 gen_op_decw_ECX,
951 gen_op_decl_ECX,
952 X86_64_ONLY(gen_op_decq_ECX),
953};
954
955static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
956 {
957 gen_op_jnz_subb,
958 gen_op_jnz_subw,
959 gen_op_jnz_subl,
960 X86_64_ONLY(gen_op_jnz_subq),
961 },
962 {
963 gen_op_jz_subb,
964 gen_op_jz_subw,
965 gen_op_jz_subl,
966 X86_64_ONLY(gen_op_jz_subq),
967 },
968};
969
970static GenOpFunc *gen_op_in_DX_T0[3] = {
971 gen_op_inb_DX_T0,
972 gen_op_inw_DX_T0,
973 gen_op_inl_DX_T0,
974};
975
976static GenOpFunc *gen_op_out_DX_T0[3] = {
977 gen_op_outb_DX_T0,
978 gen_op_outw_DX_T0,
979 gen_op_outl_DX_T0,
980};
981
982static GenOpFunc *gen_op_in[3] = {
983 gen_op_inb_T0_T1,
984 gen_op_inw_T0_T1,
985 gen_op_inl_T0_T1,
986};
987
988static GenOpFunc *gen_op_out[3] = {
989 gen_op_outb_T0_T1,
990 gen_op_outw_T0_T1,
991 gen_op_outl_T0_T1,
992};
993
994static GenOpFunc *gen_check_io_T0[3] = {
995 gen_op_check_iob_T0,
996 gen_op_check_iow_T0,
997 gen_op_check_iol_T0,
998};
999
1000static GenOpFunc *gen_check_io_DX[3] = {
1001 gen_op_check_iob_DX,
1002 gen_op_check_iow_DX,
1003 gen_op_check_iol_DX,
1004};
1005
1006static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1007{
1008 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1009 if (s->cc_op != CC_OP_DYNAMIC)
1010 gen_op_set_cc_op(s->cc_op);
1011 gen_jmp_im(cur_eip);
1012 if (use_dx)
1013 gen_check_io_DX[ot]();
1014 else
1015 gen_check_io_T0[ot]();
1016 }
1017}
1018
/* Generate one MOVS iteration: copy an 'ot'-sized element from
   DS:ESI (override-aware) to ES:EDI, then advance both pointers.
   The Dshift op loads the per-element step into T0 (presumably signed
   by the direction flag, per its name — TODO confirm). */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1040
1041static inline void gen_update_cc_op(DisasContext *s)
1042{
1043 if (s->cc_op != CC_OP_DYNAMIC) {
1044 gen_op_set_cc_op(s->cc_op);
1045 s->cc_op = CC_OP_DYNAMIC;
1046 }
1047}
1048
1049/* XXX: does not work with gdbstub "ice" single step - not a
1050 serious problem */
/* Emit the "if ECX == 0, jump to the next instruction" prologue used
   by REP string instructions.  Returns label l2, which callers may
   branch to in order to terminate the string loop (it leads to a jump
   to next_eip). */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);   /* ECX != 0: skip over the exit jump */
    gen_set_label(l2);              /* loop-exit entry point */
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);              /* fall through into the string body */
    return l2;
}
1063
/* Generate one STOS iteration: store EAX (as an 'ot'-sized element)
   to ES:EDI, then advance EDI by the Dshift step. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1081
/* Generate one LODS iteration: load an 'ot'-sized element from
   DS:ESI (override-aware) into EAX, then advance ESI by the Dshift
   step. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1099
/* Generate one SCAS iteration: compare EAX with the 'ot'-sized
   element at ES:EDI (sets the CC state via the compare op), then
   advance EDI by the Dshift step. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1118
/* Generate one CMPS iteration: compare the element at DS:ESI
   (override-aware) with the one at ES:EDI (sets the CC state), then
   advance both pointers by the Dshift step. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1141
/* Generate one INS iteration: read from port DX into the element at
   ES:EDI, then advance EDI by the Dshift step. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* A zero is stored first so that any write fault at ES:EDI is
       taken before the port read — presumably to avoid I/O side
       effects on a faulting iteration (NOTE(review): confirm). */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1161
/* Generate one OUTS iteration: load the element at DS:ESI
   (override-aware), write it to port DX, then advance ESI by the
   Dshift step. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1179
/* same method as Valgrind : we generate jumps to current or next
   instruction */

/* Expand to a REP-prefixed string insn without a termination
   condition (MOVS/STOS/LODS/INS/OUTS): one element per translated
   iteration, jumping back to the current instruction while ECX != 0,
   so single-stepping and interrupt delivery keep working. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1              \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* Same, for SCAS/CMPS: 'nz' selects REPNZ vs REPZ; the loop also
   terminates (branch to l2) when the compare result stops matching
   the prefix condition. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1223
1224enum {
1225 JCC_O,
1226 JCC_B,
1227 JCC_Z,
1228 JCC_BE,
1229 JCC_S,
1230 JCC_P,
1231 JCC_L,
1232 JCC_LE,
1233};
1234
1235static GenOpFunc1 *gen_jcc_sub[4][8] = {
1236 [OT_BYTE] = {
1237 NULL,
1238 gen_op_jb_subb,
1239 gen_op_jz_subb,
1240 gen_op_jbe_subb,
1241 gen_op_js_subb,
1242 NULL,
1243 gen_op_jl_subb,
1244 gen_op_jle_subb,
1245 },
1246 [OT_WORD] = {
1247 NULL,
1248 gen_op_jb_subw,
1249 gen_op_jz_subw,
1250 gen_op_jbe_subw,
1251 gen_op_js_subw,
1252 NULL,
1253 gen_op_jl_subw,
1254 gen_op_jle_subw,
1255 },
1256 [OT_LONG] = {
1257 NULL,
1258 gen_op_jb_subl,
1259 gen_op_jz_subl,
1260 gen_op_jbe_subl,
1261 gen_op_js_subl,
1262 NULL,
1263 gen_op_jl_subl,
1264 gen_op_jle_subl,
1265 },
1266#ifdef TARGET_X86_64
1267 [OT_QUAD] = {
1268 NULL,
1269 BUGGY_64(gen_op_jb_subq),
1270 gen_op_jz_subq,
1271 BUGGY_64(gen_op_jbe_subq),
1272 gen_op_js_subq,
1273 NULL,
1274 BUGGY_64(gen_op_jl_subq),
1275 BUGGY_64(gen_op_jle_subq),
1276 },
1277#endif
1278};
1279static GenOpFunc1 *gen_op_loop[3][4] = {
1280 [0] = {
1281 gen_op_loopnzw,
1282 gen_op_loopzw,
1283 gen_op_jnz_ecxw,
1284 },
1285 [1] = {
1286 gen_op_loopnzl,
1287 gen_op_loopzl,
1288 gen_op_jnz_ecxl,
1289 },
1290#ifdef TARGET_X86_64
1291 [2] = {
1292 gen_op_loopnzq,
1293 gen_op_loopzq,
1294 gen_op_jnz_ecxq,
1295 },
1296#endif
1297};
1298
/* Slow setcc helpers that compute the condition from the full live CC
   state; indexed by JCC_* condition code (all eight are valid here,
   unlike the _sub fast paths). */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1309
1310static GenOpFunc *gen_setcc_sub[4][8] = {
1311 [OT_BYTE] = {
1312 NULL,
1313 gen_op_setb_T0_subb,
1314 gen_op_setz_T0_subb,
1315 gen_op_setbe_T0_subb,
1316 gen_op_sets_T0_subb,
1317 NULL,
1318 gen_op_setl_T0_subb,
1319 gen_op_setle_T0_subb,
1320 },
1321 [OT_WORD] = {
1322 NULL,
1323 gen_op_setb_T0_subw,
1324 gen_op_setz_T0_subw,
1325 gen_op_setbe_T0_subw,
1326 gen_op_sets_T0_subw,
1327 NULL,
1328 gen_op_setl_T0_subw,
1329 gen_op_setle_T0_subw,
1330 },
1331 [OT_LONG] = {
1332 NULL,
1333 gen_op_setb_T0_subl,
1334 gen_op_setz_T0_subl,
1335 gen_op_setbe_T0_subl,
1336 gen_op_sets_T0_subl,
1337 NULL,
1338 gen_op_setl_T0_subl,
1339 gen_op_setle_T0_subl,
1340 },
1341#ifdef TARGET_X86_64
1342 [OT_QUAD] = {
1343 NULL,
1344 gen_op_setb_T0_subq,
1345 gen_op_setz_T0_subq,
1346 gen_op_setbe_T0_subq,
1347 gen_op_sets_T0_subq,
1348 NULL,
1349 gen_op_setl_T0_subq,
1350 gen_op_setle_T0_subq,
1351 },
1352#endif
1353};
1354
/* FPU arithmetic on ST0 with FT0 as the second operand, indexed by
   the 3-bit op field; both compare slots (2 and 3) map to fcom. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1365
/* NOTE the exception in "r" op ordering: when the destination is
   ST(i) rather than ST0, the sub/div and their reversed forms swap
   places in the table.  Compare slots are NULL (handled elsewhere). */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1377
/* Generate a two-operand ALU operation 'op' of size 'ot'; the second
   operand is expected in T1.  If d == OR_TMP0, the destination is a
   memory operand whose address is already in A0. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the incoming carry, so the lazy flags state
           must be materialized before the operation */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP only updates flags; no writeback, no extra cc update */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1437
1438/* if d == OR_TMP0, it means memory operand (address in A0) */
1439static void gen_inc(DisasContext *s1, int ot, int d, int c)
1440{
1441 if (d != OR_TMP0)
1442 gen_op_mov_TN_reg[ot][0][d]();
1443 else
1444 gen_op_ld_T0_A0[ot + s1->mem_index]();
1445 if (s1->cc_op != CC_OP_DYNAMIC)
1446 gen_op_set_cc_op(s1->cc_op);
1447 if (c > 0) {
1448 gen_op_incl_T0();
1449 s1->cc_op = CC_OP_INCB + ot;
1450 } else {
1451 gen_op_decl_T0();
1452 s1->cc_op = CC_OP_DECB + ot;
1453 }
1454 if (d != OR_TMP0)
1455 gen_op_mov_reg_T0[ot][d]();
1456 else
1457 gen_op_st_T0_A0[ot + s1->mem_index]();
1458 gen_op_update_inc_cc();
1459}
1460
1461static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1462{
1463 if (d != OR_TMP0)
1464 gen_op_mov_TN_reg[ot][0][d]();
1465 else
1466 gen_op_ld_T0_A0[ot + s1->mem_index]();
1467 if (s != OR_TMP1)
1468 gen_op_mov_TN_reg[ot][1][s]();
1469 /* for zero counts, flags are not updated, so must do it dynamically */
1470 if (s1->cc_op != CC_OP_DYNAMIC)
1471 gen_op_set_cc_op(s1->cc_op);
1472
1473 if (d != OR_TMP0)
1474 gen_op_shift_T0_T1_cc[ot][op]();
1475 else
1476 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1477 if (d != OR_TMP0)
1478 gen_op_mov_reg_T0[ot][d]();
1479 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1480}
1481
1482static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1483{
1484 /* currently not optimized */
1485 gen_op_movl_T1_im(c);
1486 gen_shift(s1, op, ot, d, OR_TMP1);
1487}
1488
/* Decode the ModR/M byte (plus optional SIB byte and displacement) of
   the current instruction and generate code that computes the effective
   address into A0, including the segment base when required.  Always
   reports OR_A0 / 0 through reg_ptr / offset_ptr. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;          /* segment override prefix, or -1 */
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 means a SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, 32 bit displacement */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* RIP-relative addressing in 64 bit mode */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* displacement only */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16 bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* absolute 16 bit displacement, no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight classic 16 bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* default segment is SS when BP is part of the address */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1678
1679static void gen_nop_modrm(DisasContext *s, int modrm)
1680{
1681 int mod, rm, base, code;
1682
1683 mod = (modrm >> 6) & 3;
1684 if (mod == 3)
1685 return;
1686 rm = modrm & 7;
1687
1688 if (s->aflag) {
1689
1690 base = rm;
1691
1692 if (base == 4) {
1693 code = ldub_code(s->pc++);
1694 base = (code & 7);
1695 }
1696
1697 switch (mod) {
1698 case 0:
1699 if (base == 5) {
1700 s->pc += 4;
1701 }
1702 break;
1703 case 1:
1704 s->pc++;
1705 break;
1706 default:
1707 case 2:
1708 s->pc += 4;
1709 break;
1710 }
1711 } else {
1712 switch (mod) {
1713 case 0:
1714 if (rm == 6) {
1715 s->pc += 2;
1716 }
1717 break;
1718 case 1:
1719 s->pc++;
1720 break;
1721 default:
1722 case 2:
1723 s->pc += 2;
1724 break;
1725 }
1726 }
1727}
1728
1729/* used for LEA and MOV AX, mem */
1730static void gen_add_A0_ds_seg(DisasContext *s)
1731{
1732 int override, must_add_seg;
1733 must_add_seg = s->addseg;
1734 override = R_DS;
1735 if (s->override >= 0) {
1736 override = s->override;
1737 must_add_seg = 1;
1738 } else {
1739 override = R_DS;
1740 }
1741 if (must_add_seg) {
1742#ifdef TARGET_X86_64
1743 if (CODE64(s)) {
1744 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1745 } else
1746#endif
1747 {
1748 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1749 }
1750 }
1751}
1752
1753/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1754 OR_TMP0 */
1755static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1756{
1757 int mod, rm, opreg, disp;
1758
1759 mod = (modrm >> 6) & 3;
1760 rm = (modrm & 7) | REX_B(s);
1761 if (mod == 3) {
1762 if (is_store) {
1763 if (reg != OR_TMP0)
1764 gen_op_mov_TN_reg[ot][0][reg]();
1765 gen_op_mov_reg_T0[ot][rm]();
1766 } else {
1767 gen_op_mov_TN_reg[ot][0][rm]();
1768 if (reg != OR_TMP0)
1769 gen_op_mov_reg_T0[ot][reg]();
1770 }
1771 } else {
1772 gen_lea_modrm(s, modrm, &opreg, &disp);
1773 if (is_store) {
1774 if (reg != OR_TMP0)
1775 gen_op_mov_TN_reg[ot][0][reg]();
1776 gen_op_st_T0_A0[ot + s->mem_index]();
1777 } else {
1778 gen_op_ld_T0_A0[ot + s->mem_index]();
1779 if (reg != OR_TMP0)
1780 gen_op_mov_reg_T0[ot][reg]();
1781 }
1782 }
1783}
1784
1785static inline uint32_t insn_get(DisasContext *s, int ot)
1786{
1787 uint32_t ret;
1788
1789 switch(ot) {
1790 case OT_BYTE:
1791 ret = ldub_code(s->pc);
1792 s->pc++;
1793 break;
1794 case OT_WORD:
1795 ret = lduw_code(s->pc);
1796 s->pc += 2;
1797 break;
1798 default:
1799 case OT_LONG:
1800 ret = ldl_code(s->pc);
1801 s->pc += 4;
1802 break;
1803 }
1804 return ret;
1805}
1806
1807static inline int insn_const_size(unsigned int ot)
1808{
1809 if (ot <= OT_LONG)
1810 return 1 << ot;
1811 else
1812 return 4;
1813}
1814
/* Emit a TB exit jumping to 'eip'.  When the target lies on one of the
   (at most two) pages already covered by this TB, a direct patchable
   jump is emitted so the blocks can be chained; otherwise a normal end
   of block is generated. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) so cpu_exec can patch the chain */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1839
/* Generate a conditional jump for condition code 'b': 'val' is the
   taken target, 'next_eip' the fall-through.  With s->jmp_opt both
   targets end the TB as chainable direct jumps; otherwise a slow
   indirect exit through gen_eob() is used. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;                /* odd condition codes are the negations */
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* only Z and S can be tested directly on the result; the
               "% 4" folds the cc_op down to the operand size */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast path: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* inverted conditions simply swap the two targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: compute the condition and branch inside the TB */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1971
/* Generate code computing condition 'b' into T0 (0 or 1), as used by
   SETcc and CMOVcc.  Fast paths mirror the ones in gen_jcc. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;                /* odd condition codes are the negations */
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read directly from the stored result */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize the lazy flags, then use the generic helper */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2038
/* Move T0 to seg_reg and compute if the CPU state may change.  Never
   call this function with seg_reg == R_CS. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: full descriptor load which can fault, so the
           flags state and EIP must be in sync first */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple base = selector << 4 load */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2061
/* Add 'addend' to the stack pointer with the width implied by the
   current stack mode (64, 32 or 16 bit).  Common small addends use
   dedicated fast ops. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}
2088
/* Generate a push of T0.  The generated code depends on ss32, addseg
   and dflag; ESP is only updated after the store so a faulting store
   leaves the stack pointer unchanged (precise exceptions). */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* keep the raw offset in T1 for the ESP writeback */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2128
2129/* generate a push. It depends on ss32, addseg and dflag */
2130/* slower version for T1, only used for call Ev */
2131static void gen_push_T1(DisasContext *s)
2132{
2133#ifdef TARGET_X86_64
2134 if (CODE64(s)) {
2135 gen_op_movq_A0_reg[R_ESP]();
2136 if (s->dflag) {
2137 gen_op_subq_A0_8();
2138 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2139 } else {
2140 gen_op_subq_A0_2();
2141 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2142 }
2143 gen_op_movq_ESP_A0();
2144 } else
2145#endif
2146 {
2147 gen_op_movl_A0_reg[R_ESP]();
2148 if (!s->dflag)
2149 gen_op_subl_A0_2();
2150 else
2151 gen_op_subl_A0_4();
2152 if (s->ss32) {
2153 if (s->addseg) {
2154 gen_op_addl_A0_SS();
2155 }
2156 } else {
2157 gen_op_andl_A0_ffff();
2158 gen_op_addl_A0_SS();
2159 }
2160 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2161
2162 if (s->ss32 && !s->addseg)
2163 gen_op_movl_ESP_A0();
2164 else
2165 gen_stack_update(s, (-2) << s->dflag);
2166 }
2167}
2168
/* Load the top of stack into T0 without adjusting ESP; the two step
   pop (load, then gen_pop_update) is necessary for precise
   exceptions. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2190
2191static void gen_pop_update(DisasContext *s)
2192{
2193#ifdef TARGET_X86_64
2194 if (CODE64(s) && s->dflag) {
2195 gen_stack_update(s, 8);
2196 } else
2197#endif
2198 {
2199 gen_stack_update(s, 2 << s->dflag);
2200 }
2201}
2202
/* Compute the current stack address into A0 (segment base included when
   addseg is set) and keep the raw stack offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2212
/* Generate PUSHA: store all eight general registers (EDI last) and
   lower ESP by 16 << dflag.
   NOTE: wrap around in 16 bit not fully handled. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();        /* final ESP value, written back at the end */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* registers are pushed in order EAX..EDI, i.e. stored bottom-up */
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2231
/* Generate POPA: reload the general registers (the stored ESP value is
   skipped) and raise ESP by 16 << dflag.
   NOTE: wrap around in 16 bit not fully handled. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);  /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2253
/* Generate the ENTER instruction: push EBP, optionally copy 'level'
   frame pointers (level is masked to 5 bits per the architecture),
   set EBP to the new frame and reserve 'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2301
/* Raise exception 'trapno' at EIP 'cur_eip': the lazy flags and EIP
   are synchronized first so the exception state is precise, then the
   TB ends. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2310
/* An interrupt is different from an exception because of the
   privilege checks.  'next_eip - cur_eip' gives the instruction length
   needed to compute the return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2322
/* Generate a debug exception at 'cur_eip' (state is synchronized first)
   and end the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2331
/* Generate a generic end of block.  Trace exception is also generated
   if needed. */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the one-instruction IRQ shadow (mov ss / sti) ends here */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: raise the single step trap after the instruction */
        gen_op_single_step();
    } else {
        /* T0 = 0 means "no chained TB" to cpu_exec */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2351
/* Generate a jump to eip.  No segment change must happen before as a
   direct call to the next block may occur.  With jmp_opt the jump is
   emitted as a chainable direct TB exit (slot 'tb_num'). */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2371
2372static void gen_jmp(DisasContext *s, target_ulong eip)
2373{
2374 gen_jmp_tb(s, eip, 0);
2375}
2376
2377static void gen_movtl_T0_im(target_ulong val)
2378{
2379#ifdef TARGET_X86_64
2380 if ((int32_t)val == val) {
2381 gen_op_movl_T0_im(val);
2382 } else {
2383 gen_op_movq_T0_im64(val >> 32, val);
2384 }
2385#else
2386 gen_op_movl_T0_im(val);
2387#endif
2388}
2389
2390static void gen_movtl_T1_im(target_ulong val)
2391{
2392#ifdef TARGET_X86_64
2393 if ((int32_t)val == val) {
2394 gen_op_movl_T1_im(val);
2395 } else {
2396 gen_op_movq_T1_im64(val >> 32, val);
2397 }
2398#else
2399 gen_op_movl_T1_im(val);
2400#endif
2401}
2402
2403static void gen_add_A0_im(DisasContext *s, int val)
2404{
2405#ifdef TARGET_X86_64
2406 if (CODE64(s))
2407 gen_op_addq_A0_im(val);
2408 else
2409#endif
2410 gen_op_addl_A0_im(val);
2411}
2412
/* 64 bit load from the address in A0 into an env offset, indexed by
   memory index (raw / kernel / user). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2420
/* 64 bit store from an env offset to the address in A0, indexed by
   memory index (raw / kernel / user). */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2428
/* 128 bit ("octa") load from the address in A0 into an env offset,
   indexed by memory index (raw / kernel / user). */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2436
/* 128 bit ("octa") store from an env offset to the address in A0,
   indexed by memory index (raw / kernel / user). */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2444
2445#define SSE_SPECIAL ((GenOpFunc2 *)1)
2446
2447#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2448#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2449 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2450
/* Dispatch table for 0x0f-prefixed SIMD opcodes, indexed by the second
   opcode byte and the mandatory prefix: 0 = none (MMX / SSE "ps"),
   1 = 0x66 ("pd"), 2 = 0xf3 ("ss"), 3 = 0xf2 ("sd").  SSE_SPECIAL
   entries are decoded by hand in gen_sse(); NULL means illegal. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* actually dispatched via sse_op_table4 on imm8 */
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2573
/* Immediate-count shift ops (opcodes 0x71/0x72/0x73), indexed by
   (opcode group * 8 + ModR/M reg field) and the MMX/XMM flavour.
   Unlisted slots are NULL (illegal encodings). */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2586
/* Scalar int<->float conversions in rows of four:
   row 0 = cvtsi2ss/sd, row 1 = cvtts?2si (truncating), row 2 =
   cvts?2si; within a row the 64 bit integer ("sq") variants only
   exist on x86-64 (NULL otherwise via X86_64_ONLY). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2603
/* CMPPS/CMPPD/CMPSS/CMPSD, indexed by the imm8 comparison predicate
   (0..7) and the prefix flavour (ps/pd/ss/sd). */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2614
/* Translate one MMX/SSE/SSE2/SSE3 instruction.
   'b' is the opcode byte that followed 0x0f (masked to 8 bits below),
   'pc_start' the guest address of the instruction (used for exception
   reporting), and 'rex_r' the REX.R extension of the modrm reg field.
   The operand-size / repeat prefixes select the variant column (b1)
   used to index the sse_op_table* arrays.  Raises #NM, #UD as needed
   via gen_exception() and returns without emitting the op in that case. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* b1: 0 = no prefix (MMX), 1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise #NM so the OS can lazily restore FPU state */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE requires CR4.OSFXSR; MMX does not */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* individually handled instruction; fold the prefix kind into
           bits 8-9 of b for the dispatch below */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd positions */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even positions */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* upper qword of the destination is cleared */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* store forms have no register encoding */
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* the shift count comes from an immediate byte; it is staged
               in xmm_t0/mmx_t0 so the regular two-operand op can be used */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            /* source is an MMX register/qword, so MMX state is needed */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these have a trailing immediate; account for it when
               computing RIP-relative addresses */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            /* three-operand form: table entry is really a GenOpFunc3 */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* (u)comiss/(u)comisd update EFLAGS */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3212
3213#ifdef VBOX
3214/* Checks if it's an invalid lock sequence. Only a few instructions
3215 can be used together with the lock prefix and of those only the
3216 form that write a memory operand. So, this is kind of annoying
3217 work to do...
3218 The AMD manual lists the following instructions.
3219 ADC
3220 ADD
3221 AND
3222 BTC
3223 BTR
3224 BTS
3225 CMPXCHG
3226 CMPXCHG8B
3227 CMPXCHG16B
3228 DEC
3229 INC
3230 NEG
3231 NOT
3232 OR
3233 SBB
3234 SUB
3235 XADD
3236 XCHG
3237 XOR */
3238static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
3239{
3240 target_ulong pc = s->pc;
3241 int modrm, mod, op;
3242
3243 /* X={8,16,32,64} Y={16,32,64} */
3244 switch (b)
3245 {
3246 /* /2: ADC reg/memX, immX */
3247 /* /0: ADD reg/memX, immX */
3248 /* /4: AND reg/memX, immX */
3249 /* /1: OR reg/memX, immX */
3250 /* /3: SBB reg/memX, immX */
3251 /* /5: SUB reg/memX, immX */
3252 /* /6: XOR reg/memX, immX */
3253 case 0x80:
3254 case 0x81:
3255 case 0x83:
3256 modrm = ldub_code(pc++);
3257 op = (modrm >> 3) & 7;
3258 if (op == 7) /* /7: CMP */
3259 break;
3260 mod = (modrm >> 6) & 3;
3261 if (mod == 3) /* register destination */
3262 break;
3263 return false;
3264
3265 case 0x10: /* /r: ADC reg/mem8, reg8 */
3266 case 0x11: /* /r: ADC reg/memX, regY */
3267 case 0x00: /* /r: ADD reg/mem8, reg8 */
3268 case 0x01: /* /r: ADD reg/memX, regY */
3269 case 0x20: /* /r: AND reg/mem8, reg8 */
3270 case 0x21: /* /r: AND reg/memY, regY */
3271 case 0x08: /* /r: OR reg/mem8, reg8 */
3272 case 0x09: /* /r: OR reg/memY, regY */
3273 case 0x18: /* /r: SBB reg/mem8, reg8 */
3274 case 0x19: /* /r: SBB reg/memY, regY */
3275 case 0x28: /* /r: SUB reg/mem8, reg8 */
3276 case 0x29: /* /r: SUB reg/memY, regY */
3277 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
3278 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
3279 case 0x30: /* /r: XOR reg/mem8, reg8 */
3280 case 0x31: /* /r: XOR reg/memY, regY */
3281 modrm = ldub_code(pc++);
3282 mod = (modrm >> 6) & 3;
3283 if (mod == 3) /* register destination */
3284 break;
3285 return false;
3286
3287 /* /1: DEC reg/memX */
3288 /* /0: INC reg/memX */
3289 case 0xfe:
3290 case 0xff:
3291 modrm = ldub_code(pc++);
3292 mod = (modrm >> 6) & 3;
3293 if (mod == 3) /* register destination */
3294 break;
3295 return false;
3296
3297 /* /3: NEG reg/memX */
3298 /* /2: NOT reg/memX */
3299 case 0xf6:
3300 case 0xf7:
3301 modrm = ldub_code(pc++);
3302 mod = (modrm >> 6) & 3;
3303 if (mod == 3) /* register destination */
3304 break;
3305 return false;
3306
3307 case 0x0f:
3308 b = ldub_code(pc++);
3309 switch (b)
3310 {
3311 /* /7: BTC reg/memY, imm8 */
3312 /* /6: BTR reg/memY, imm8 */
3313 /* /5: BTS reg/memY, imm8 */
3314 case 0xba:
3315 modrm = ldub_code(pc++);
3316 op = (modrm >> 3) & 7;
3317 if (op < 5)
3318 break;
3319 mod = (modrm >> 6) & 3;
3320 if (mod == 3) /* register destination */
3321 break;
3322 return false;
3323
3324 case 0xbb: /* /r: BTC reg/memY, regY */
3325 case 0xb3: /* /r: BTR reg/memY, regY */
3326 case 0xab: /* /r: BTS reg/memY, regY */
3327 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
3328 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
3329 case 0xc0: /* /r: XADD reg/mem8, reg8 */
3330 case 0xc1: /* /r: XADD reg/memY, regY */
3331 modrm = ldub_code(pc++);
3332 mod = (modrm >> 6) & 3;
3333 if (mod == 3) /* register destination */
3334 break;
3335 return false;
3336
3337 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
3338 case 0xc7:
3339 modrm = ldub_code(pc++);
3340 op = (modrm >> 3) & 7;
3341 if (op != 1)
3342 break;
3343 return false;
3344 }
3345 break;
3346 }
3347
3348 /* illegal sequence. The s->pc is past the lock prefix and that
3349 is sufficient for the TB, I think. */
3350 Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
3351 return true;
3352}
3353#endif /* VBOX */
3354
3355
3356/* convert one instruction. s->is_jmp is set if the translation must
3357 be stopped. Return the next pc value */
3358static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3359{
3360 int b, prefixes, aflag, dflag;
3361 int shift, ot;
3362 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3363 target_ulong next_eip, tval;
3364 int rex_w, rex_r;
3365
3366 s->pc = pc_start;
3367 prefixes = 0;
3368 aflag = s->code32;
3369 dflag = s->code32;
3370 s->override = -1;
3371 rex_w = -1;
3372 rex_r = 0;
3373#ifdef TARGET_X86_64
3374 s->rex_x = 0;
3375 s->rex_b = 0;
3376 x86_64_hregs = 0;
3377#endif
3378 s->rip_offset = 0; /* for relative ip address */
3379
3380#ifdef VBOX
3381 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
3382 gen_update_eip(pc_start - s->cs_base);
3383#endif
3384
3385 next_byte:
3386 b = ldub_code(s->pc);
3387 s->pc++;
3388 /* check prefixes */
3389#ifdef TARGET_X86_64
3390 if (CODE64(s)) {
3391 switch (b) {
3392 case 0xf3:
3393 prefixes |= PREFIX_REPZ;
3394 goto next_byte;
3395 case 0xf2:
3396 prefixes |= PREFIX_REPNZ;
3397 goto next_byte;
3398 case 0xf0:
3399 prefixes |= PREFIX_LOCK;
3400 goto next_byte;
3401 case 0x2e:
3402 s->override = R_CS;
3403 goto next_byte;
3404 case 0x36:
3405 s->override = R_SS;
3406 goto next_byte;
3407 case 0x3e:
3408 s->override = R_DS;
3409 goto next_byte;
3410 case 0x26:
3411 s->override = R_ES;
3412 goto next_byte;
3413 case 0x64:
3414 s->override = R_FS;
3415 goto next_byte;
3416 case 0x65:
3417 s->override = R_GS;
3418 goto next_byte;
3419 case 0x66:
3420 prefixes |= PREFIX_DATA;
3421 goto next_byte;
3422 case 0x67:
3423 prefixes |= PREFIX_ADR;
3424 goto next_byte;
3425 case 0x40 ... 0x4f:
3426 /* REX prefix */
3427 rex_w = (b >> 3) & 1;
3428 rex_r = (b & 0x4) << 1;
3429 s->rex_x = (b & 0x2) << 2;
3430 REX_B(s) = (b & 0x1) << 3;
3431 x86_64_hregs = 1; /* select uniform byte register addressing */
3432 goto next_byte;
3433 }
3434 if (rex_w == 1) {
3435 /* 0x66 is ignored if rex.w is set */
3436 dflag = 2;
3437 } else {
3438 if (prefixes & PREFIX_DATA)
3439 dflag ^= 1;
3440 }
3441 if (!(prefixes & PREFIX_ADR))
3442 aflag = 2;
3443 } else
3444#endif
3445 {
3446 switch (b) {
3447 case 0xf3:
3448 prefixes |= PREFIX_REPZ;
3449 goto next_byte;
3450 case 0xf2:
3451 prefixes |= PREFIX_REPNZ;
3452 goto next_byte;
3453 case 0xf0:
3454 prefixes |= PREFIX_LOCK;
3455 goto next_byte;
3456 case 0x2e:
3457 s->override = R_CS;
3458 goto next_byte;
3459 case 0x36:
3460 s->override = R_SS;
3461 goto next_byte;
3462 case 0x3e:
3463 s->override = R_DS;
3464 goto next_byte;
3465 case 0x26:
3466 s->override = R_ES;
3467 goto next_byte;
3468 case 0x64:
3469 s->override = R_FS;
3470 goto next_byte;
3471 case 0x65:
3472 s->override = R_GS;
3473 goto next_byte;
3474 case 0x66:
3475 prefixes |= PREFIX_DATA;
3476 goto next_byte;
3477 case 0x67:
3478 prefixes |= PREFIX_ADR;
3479 goto next_byte;
3480 }
3481 if (prefixes & PREFIX_DATA)
3482 dflag ^= 1;
3483 if (prefixes & PREFIX_ADR)
3484 aflag ^= 1;
3485 }
3486
3487 s->prefix = prefixes;
3488 s->aflag = aflag;
3489 s->dflag = dflag;
3490
3491 /* lock generation */
3492#ifndef VBOX
3493 if (prefixes & PREFIX_LOCK)
3494 gen_op_lock();
3495#else /* VBOX */
3496 if (prefixes & PREFIX_LOCK) {
3497 if (is_invalid_lock_sequence(s, pc_start, b)) {
3498 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3499 return s->pc;
3500 }
3501 gen_op_lock();
3502 }
3503#endif /* VBOX */
3504
3505 /* now check op code */
3506 reswitch:
3507 switch(b) {
3508 case 0x0f:
3509 /**************************/
3510 /* extended op code */
3511 b = ldub_code(s->pc++) | 0x100;
3512 goto reswitch;
3513
3514 /**************************/
3515 /* arith & logic */
3516 case 0x00 ... 0x05:
3517 case 0x08 ... 0x0d:
3518 case 0x10 ... 0x15:
3519 case 0x18 ... 0x1d:
3520 case 0x20 ... 0x25:
3521 case 0x28 ... 0x2d:
3522 case 0x30 ... 0x35:
3523 case 0x38 ... 0x3d:
3524 {
3525 int op, f, val;
3526 op = (b >> 3) & 7;
3527 f = (b >> 1) & 3;
3528
3529 if ((b & 1) == 0)
3530 ot = OT_BYTE;
3531 else
3532 ot = dflag + OT_WORD;
3533
3534 switch(f) {
3535 case 0: /* OP Ev, Gv */
3536 modrm = ldub_code(s->pc++);
3537 reg = ((modrm >> 3) & 7) | rex_r;
3538 mod = (modrm >> 6) & 3;
3539 rm = (modrm & 7) | REX_B(s);
3540 if (mod != 3) {
3541 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3542 opreg = OR_TMP0;
3543 } else if (op == OP_XORL && rm == reg) {
3544 xor_zero:
3545 /* xor reg, reg optimisation */
3546 gen_op_movl_T0_0();
3547 s->cc_op = CC_OP_LOGICB + ot;
3548 gen_op_mov_reg_T0[ot][reg]();
3549 gen_op_update1_cc();
3550 break;
3551 } else {
3552 opreg = rm;
3553 }
3554 gen_op_mov_TN_reg[ot][1][reg]();
3555 gen_op(s, op, ot, opreg);
3556 break;
3557 case 1: /* OP Gv, Ev */
3558 modrm = ldub_code(s->pc++);
3559 mod = (modrm >> 6) & 3;
3560 reg = ((modrm >> 3) & 7) | rex_r;
3561 rm = (modrm & 7) | REX_B(s);
3562 if (mod != 3) {
3563 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3564 gen_op_ld_T1_A0[ot + s->mem_index]();
3565 } else if (op == OP_XORL && rm == reg) {
3566 goto xor_zero;
3567 } else {
3568 gen_op_mov_TN_reg[ot][1][rm]();
3569 }
3570 gen_op(s, op, ot, reg);
3571 break;
3572 case 2: /* OP A, Iv */
3573 val = insn_get(s, ot);
3574 gen_op_movl_T1_im(val);
3575 gen_op(s, op, ot, OR_EAX);
3576 break;
3577 }
3578 }
3579 break;
3580
3581 case 0x80: /* GRP1 */
3582 case 0x81:
3583 case 0x82:
3584 case 0x83:
3585 {
3586 int val;
3587
3588 if ((b & 1) == 0)
3589 ot = OT_BYTE;
3590 else
3591 ot = dflag + OT_WORD;
3592
3593 modrm = ldub_code(s->pc++);
3594 mod = (modrm >> 6) & 3;
3595 rm = (modrm & 7) | REX_B(s);
3596 op = (modrm >> 3) & 7;
3597
3598 if (mod != 3) {
3599 if (b == 0x83)
3600 s->rip_offset = 1;
3601 else
3602 s->rip_offset = insn_const_size(ot);
3603 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3604 opreg = OR_TMP0;
3605 } else {
3606 opreg = rm;
3607 }
3608
3609 switch(b) {
3610 default:
3611 case 0x80:
3612 case 0x81:
3613 case 0x82:
3614 val = insn_get(s, ot);
3615 break;
3616 case 0x83:
3617 val = (int8_t)insn_get(s, OT_BYTE);
3618 break;
3619 }
3620 gen_op_movl_T1_im(val);
3621 gen_op(s, op, ot, opreg);
3622 }
3623 break;
3624
3625 /**************************/
3626 /* inc, dec, and other misc arith */
3627 case 0x40 ... 0x47: /* inc Gv */
3628 ot = dflag ? OT_LONG : OT_WORD;
3629 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3630 break;
3631 case 0x48 ... 0x4f: /* dec Gv */
3632 ot = dflag ? OT_LONG : OT_WORD;
3633 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3634 break;
3635 case 0xf6: /* GRP3 */
3636 case 0xf7:
3637 if ((b & 1) == 0)
3638 ot = OT_BYTE;
3639 else
3640 ot = dflag + OT_WORD;
3641
3642 modrm = ldub_code(s->pc++);
3643 mod = (modrm >> 6) & 3;
        /* GRP3 (opcodes 0xf6/0xf7): the sub-operation is encoded in the
           modrm 'reg' field: 0=test, 2=not, 3=neg, 4=mul, 5=imul,
           6=div, 7=idiv.  T0 is loaded with the r/m operand first. */
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            /* 'test Ev,Iv' carries a trailing immediate; record its size
               so RIP-relative displacements are computed correctly. */
            if (op == 0)
                s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
        }

        switch(op) {
        case 0: /* test */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op_testl_T0_T1_cc();
            s->cc_op = CC_OP_LOGICB + ot;
            break;
        case 2: /* not */
            /* NOT leaves EFLAGS untouched, so cc_op is not updated. */
            gen_op_notl_T0();
            if (mod != 3) {
                gen_op_st_T0_A0[ot + s->mem_index]();
            } else {
                gen_op_mov_reg_T0[ot][rm]();
            }
            break;
        case 3: /* neg */
            gen_op_negl_T0();
            if (mod != 3) {
                gen_op_st_T0_A0[ot + s->mem_index]();
            } else {
                gen_op_mov_reg_T0[ot][rm]();
            }
            gen_op_update_neg_cc();
            s->cc_op = CC_OP_SUBB + ot;
            break;
        case 4: /* mul */
            /* Unsigned multiply of the accumulator by T0; the operand
               size selects AL/AX/EAX/RAX and the matching cc_op. */
            switch(ot) {
            case OT_BYTE:
                gen_op_mulb_AL_T0();
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mulw_AX_T0();
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
                gen_op_mull_EAX_T0();
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_op_mulq_EAX_T0();
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 5: /* imul */
            switch(ot) {
            case OT_BYTE:
                gen_op_imulb_AL_T0();
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_imulw_AX_T0();
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
                gen_op_imull_EAX_T0();
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_op_imulq_EAX_T0();
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 6: /* div */
            /* Division can raise #DE at runtime, so EIP is synced to the
               start of the instruction before the helper is emitted. */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_divb_AL_T0();
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_divw_AX_T0();
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_divl_EAX_T0();
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_divq_EAX_T0();
                break;
#endif
            }
            break;
        case 7: /* idiv */
            /* Same #DE considerations as 'div' above. */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_idivb_AL_T0();
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_idivw_AX_T0();
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_idivl_EAX_T0();
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_idivq_EAX_T0();
                break;
#endif
            }
            break;
        default:
            goto illegal_op;
        }
        break;
3776
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        /* Sub-operation in modrm 'reg' field: 0=inc, 1=dec, 2=call,
           3=lcall, 4=jmp, 5=ljmp, 6=push.  GRP4 (0xfe) only allows
           inc/dec. */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op;
        }
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for far calls/jumps, the operand is 16 or 32 bit,
                   even in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* lcall/ljmp load a far pointer themselves below; for the
               others (except inc/dec which use OR_TMP0 lazily) prime T0. */
            if (op >= 2 && op != 3 && op != 5)
                gen_op_ld_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
        }

        switch(op) {
        case 0: /* inc Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, 1);
            break;
        case 1: /* dec Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, -1);
            break;
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
#ifdef VBOX_WITH_CALL_RECORD
            if (s->record_call)
                gen_op_record_call();
#endif
            /* with a 16-bit operand size the target EIP is truncated */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
            gen_push_T1(s);
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 3: /* lcall Ev */
            /* load offset into T1, then the selector word into T0 */
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
        do_lcall:
            if (s->pe && !s->vm86) {
                /* protected mode far call goes through a helper that can
                   fault, so flags and EIP are synced first */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
            } else {
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
            }
            gen_eob(s);
            break;
        case 4: /* jmp Ev */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
        do_ljmp:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
            } else {
                /* real/vm86 mode: load CS base directly, no checks */
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
                gen_op_movl_T0_T1();
                gen_op_jmp_T0();
            }
            gen_eob(s);
            break;
        case 6: /* push Ev */
            gen_push_T0(s);
            break;
        default:
            goto illegal_op;
        }
        break;
3886
    case 0x84: /* test Ev, Gv */
    case 0x85:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* AND the two operands for flags only; no result is written back */
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg[ot][1][reg]();
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0xa8: /* test eAX, Iv */
    case 0xa9:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        val = insn_get(s, ot);

        /* accumulator form: test AL/AX/EAX against an immediate */
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;
3918
    case 0x98: /* CBW/CWDE/CDQE: sign-extend within the accumulator */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movslq_RAX_EAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movswl_EAX_AX();
        else
            gen_op_movsbw_AX_AL();
        break;
    case 0x99: /* CWD/CDQ/CQO: sign-extend accumulator into DX/EDX/RDX */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movsqo_RDX_RAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movslq_EDX_EAX();
        else
            gen_op_movswl_DX_AX();
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b: /* imul Gv, Ev, Ib (sign-extended) */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* immediate forms: record immediate size for RIP-relative addressing */
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            /* two-operand form: second source is the destination register */
            gen_op_mov_TN_reg[ot][1][reg]();
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        } else
#endif
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
        } else {
            gen_op_imulw_T0_T1();
        }
        gen_op_mov_reg_T0[ot][reg]();
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            /* register form: swap the register operands around the add */
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1[ot][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            /* memory form: sum goes to memory, old value to the register */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0[ot + s->mem_index]();
            gen_op_mov_reg_T1[ot][reg]();
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        /* T1 = replacement value, T0 = destination; the helper compares
           against the accumulator and updates it on mismatch */
        gen_op_mov_TN_reg[ot][1][reg]();
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][rm]();
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        }
        /* flags are those of the implicit compare (sub) */
        s->cc_op = CC_OP_SUBB + ot;
        break;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        /* only the memory form with /1 (reg field == 1) is valid */
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
        /* helper computes EFLAGS itself, so flush lazy flags first */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_cmpxchg8b();
        s->cc_op = CC_OP_EFLAGS;
        break;
4035
    /**************************/
    /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            /* in long mode the default pop size is 64 bit */
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a: /* push Ib (sign-extended) */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            /* imm16 frame size followed by imm8 nesting level */
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        /* leave = mov SP, BP; pop BP -- width follows stack/code size */
        if (CODE64(s)) {
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
        } else if (s->ss32) {
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
        } else {
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0[ot][R_EBP]();
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        /* segment index is encoded in bits 3-4 of the opcode */
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
4175
    /**************************/
    /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            /* address must be computed before the immediate is read */
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0[ot + s->mem_index]();
        else
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
#ifdef VBOX /* dtrace hot fix */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
#endif
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0[ot][reg]();
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        /* loading CS (or an out-of-range selector index) is invalid */
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        /* to memory the store is always 16 bit; to a register it
           follows the operand size */
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;

    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg[ot][0][rm]();
                /* bit 3 of the opcode distinguishes movsx from movzx */
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    gen_op_movzbl_T0_T0();
                    break;
                case OT_BYTE | 8:
                    gen_op_movsbl_T0_T0();
                    break;
                case OT_WORD:
                    gen_op_movzwl_T0_T0();
                    break;
                default:
                case OT_WORD | 8:
                    gen_op_movswl_T0_T0();
                    break;
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0[ot + s->mem_index]();
                } else {
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            }
        }
        break;

    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
        break;
    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                /* moffs is a full 64-bit address in 64-bit address mode */
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                /* shorter encoding when the address fits in 32 bits */
                if (offset_addr == (int32_t)offset_addr)
                    gen_op_movq_A0_im(offset_addr);
                else
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            /* bit 1 of the opcode selects the transfer direction */
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0[ot + s->mem_index]();
                gen_op_mov_reg_T0[ot][R_EAX]();
            } else {
                gen_op_mov_TN_reg[ot][0][R_EAX]();
                gen_op_st_T0_A0[ot + s->mem_index]();
            }
        }
        break;
    case 0xd7: /* xlat */
        /* AL = [DS:(E/R)BX + unsigned AL] */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg[R_EBX]();
            gen_op_addq_A0_AL();
        } else
#endif
        {
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_AL();
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case: the only instruction with a full imm64 */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0[OT_QUAD][reg]();
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0[ot][reg]();
        }
        break;
4401
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_mov_reg_T0[ot][rm]();
            gen_op_mov_reg_T1[ot][reg]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                gen_op_lock();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_st_T0_A0[ot + s->mem_index]();
            if (!(prefixes & PREFIX_LOCK))
                gen_op_unlock();
            gen_op_mov_reg_T1[ot][reg]();
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        /* common tail: load a far pointer (offset + selector) from memory
           into a segment register ('op') and a general register */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0[ot + s->mem_index]();
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1[ot][reg]();
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
4474
    /************************/
    /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        shift = 2;
    grp2:
        {
            /* common body for GRP2: shift==0 -> count in CL,
               shift==1 -> count is 1, shift==2 -> imm8 count */
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (shift == 2) {
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;

    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        /* common body for shld/shrd: op selects direction,
           shift selects imm8 vs CL count */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
        }
        gen_op_mov_TN_reg[ot][1][reg]();

        if (shift) {
            val = ldub_code(s->pc++);
            /* count is masked mod 64 for 64-bit operands, mod 32 otherwise */
            if (ot == OT_QUAD)
                val &= 0x3f;
            else
                val &= 0x1f;
            if (val) {
                if (mod == 3)
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
                else
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
                if (op == 0 && ot != OT_WORD)
                    s->cc_op = CC_OP_SHLB + ot;
                else
                    s->cc_op = CC_OP_SARB + ot;
            }
        } else {
            /* variable count: helper decides at runtime, flags unknown */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (mod == 3)
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
            else
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
        }
        if (mod == 3) {
            gen_op_mov_reg_T0[ot][rm]();
        }
        break;
4583
    /************************/
    /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        /* 6-bit op: low 3 bits of the escape opcode combined with the
           modrm reg field -- indexes the full D8-DF operation space */
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    /* op >> 4 selects the memory operand type:
                       0 = float32, 1 = int32, 2 = float64, 3 = int16 */
                    switch(op >> 4) {
                    case 0:
                        gen_op_flds_FT0_A0();
                        break;
                    case 1:
                        gen_op_fildl_FT0_A0();
                        break;
                    case 2:
                        gen_op_fldl_FT0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fild_FT0_A0();
                        break;
                    }

                    gen_op_fp_arith_ST0_FT0[op1]();
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        gen_op_fpop();
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    /* loads */
                    switch(op >> 4) {
                    case 0:
                        gen_op_flds_ST0_A0();
                        break;
                    case 1:
                        gen_op_fildl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fldl_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fild_ST0_A0();
                        break;
                    }
                    break;
                case 1:
                    /* fisttp: store with truncation, then pop (SSE3) */
                    switch(op >> 4) {
                    case 1:
                        gen_op_fisttl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fisttll_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fistt_ST0_A0();
                    }
                    gen_op_fpop();
                    break;
                default:
                    /* stores (fst/fstp and integer variants) */
                    switch(op >> 4) {
                    case 0:
                        gen_op_fsts_ST0_A0();
                        break;
                    case 1:
                        gen_op_fistl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fstl_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fist_ST0_A0();
                        break;
                    }
                    if ((op & 7) == 3)
                        gen_op_fpop();
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                gen_op_fldenv_A0(s->dflag);
                break;
            case 0x0d: /* fldcw mem */
                gen_op_fldcw_A0();
                break;
            case 0x0e: /* fnstenv mem */
                gen_op_fnstenv_A0(s->dflag);
                break;
            case 0x0f: /* fnstcw mem */
                gen_op_fnstcw_A0();
                break;
            case 0x1d: /* fldt mem */
                gen_op_fldt_ST0_A0();
                break;
            case 0x1f: /* fstpt mem */
                gen_op_fstt_ST0_A0();
                gen_op_fpop();
                break;
            case 0x2c: /* frstor mem */
                gen_op_frstor_A0(s->dflag);
                break;
            case 0x2e: /* fnsave mem */
                gen_op_fnsave_A0(s->dflag);
                break;
            case 0x2f: /* fnstsw mem */
                gen_op_fnstsw_A0();
                break;
            case 0x3c: /* fbld */
                gen_op_fbld_ST0_A0();
                break;
            case 0x3e: /* fbstp */
                gen_op_fbst_ST0_A0();
                gen_op_fpop();
                break;
            case 0x3d: /* fildll */
                gen_op_fildll_ST0_A0();
                break;
            case 0x3f: /* fistpll */
                gen_op_fistll_ST0_A0();
                gen_op_fpop();
                break;
            default:
                goto illegal_op;
            }
        } else {
            /* register float ops */
            opreg = rm;

            switch(op) {
            case 0x08: /* fld sti */
                gen_op_fpush();
                /* opreg+1 because the push has already moved TOP */
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                gen_op_fxchg_ST0_STN(opreg);
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_op_fwait();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    gen_op_fchs_ST0();
                    break;
                case 1: /* fabs */
                    gen_op_fabs_ST0();
                    break;
                case 4: /* ftst */
                    gen_op_fldz_FT0();
                    gen_op_fcom_ST0_FT0();
                    break;
                case 5: /* fxam */
                    gen_op_fxam_ST0();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5: push a constant onto the FP stack */
                {
                    switch(rm) {
                    case 0:
                        gen_op_fpush();
                        gen_op_fld1_ST0();
                        break;
                    case 1:
                        gen_op_fpush();
                        gen_op_fldl2t_ST0();
                        break;
                    case 2:
                        gen_op_fpush();
                        gen_op_fldl2e_ST0();
                        break;
                    case 3:
                        gen_op_fpush();
                        gen_op_fldpi_ST0();
                        break;
                    case 4:
                        gen_op_fpush();
                        gen_op_fldlg2_ST0();
                        break;
                    case 5:
                        gen_op_fpush();
                        gen_op_fldln2_ST0();
                        break;
                    case 6:
                        gen_op_fpush();
                        gen_op_fldz_ST0();
                        break;
                    default:
                        goto illegal_op;
                    }
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    gen_op_f2xm1();
                    break;
                case 1: /* fyl2x */
                    gen_op_fyl2x();
                    break;
                case 2: /* fptan */
                    gen_op_fptan();
                    break;
                case 3: /* fpatan */
                    gen_op_fpatan();
                    break;
                case 4: /* fxtract */
                    gen_op_fxtract();
                    break;
                case 5: /* fprem1 */
                    gen_op_fprem1();
                    break;
                case 6: /* fdecstp */
                    gen_op_fdecstp();
                    break;
                default:
                case 7: /* fincstp */
                    gen_op_fincstp();
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    gen_op_fprem();
                    break;
                case 1: /* fyl2xp1 */
                    gen_op_fyl2xp1();
                    break;
                case 2: /* fsqrt */
                    gen_op_fsqrt();
                    break;
                case 3: /* fsincos */
                    gen_op_fsincos();
                    break;
                case 5: /* fscale */
                    gen_op_fscale();
                    break;
                case 4: /* frndint */
                    gen_op_frndint();
                    break;
                case 6: /* fsin */
                    gen_op_fsin();
                    break;
                default:
                case 7: /* fcos */
                    gen_op_fcos();
                    break;
                }
                break;
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7;
                    if (op >= 0x20) {
                        /* destination is ST(i); 0x30.. also pops */
                        gen_op_fp_arith_STN_ST0[op1](opreg);
                        if (op >= 0x30)
                            gen_op_fpop();
                    } else {
                        gen_op_fmov_FT0_STN(opreg);
                        gen_op_fp_arith_ST0_FT0[op1]();
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcom_ST0_FT0();
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcom_ST0_FT0();
                gen_op_fpop();
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    gen_op_fmov_FT0_STN(1);
                    gen_op_fucom_ST0_FT0();
                    gen_op_fpop();
                    gen_op_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    gen_op_fclex();
                    break;
                case 3: /* fninit */
                    gen_op_fninit();
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                /* fcomi/fucomi write EFLAGS, so flush lazy flags first */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                gen_op_ffree_STN(opreg);
                break;
            case 0x2a: /* fst sti */
                gen_op_fmov_STN_ST0(opreg);
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                gen_op_fmov_STN_ST0(opreg);
                gen_op_fpop();
                break;
            case 0x2c: /* fucom st(i) */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucom_ST0_FT0();
                break;
            case 0x2d: /* fucomp st(i) */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucom_ST0_FT0();
                gen_op_fpop();
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    gen_op_fmov_FT0_STN(1);
                    gen_op_fcom_ST0_FT0();
                    gen_op_fpop();
                    gen_op_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                gen_op_ffree_STN(opreg);
                gen_op_fpop();
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    gen_op_fnstsw_EAX();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucomi_ST0_FT0();
                gen_op_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcomi_ST0_FT0();
                gen_op_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1;
                    /* condition base codes; entries 4-7 stay zero and are
                       never indexed (op & 3 selects only the first four) */
                    const static uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    /* bit 3 of op inverts the condition (fcmovnxx forms) */
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
                    gen_setcc(s, op1);
                    gen_op_fcmov_ST0_STN_T0(opreg);
                }
                break;
            default:
                goto illegal_op;
            }
        }
#ifdef USE_CODE_COPY
        s->tb->cflags |= CF_TB_FP_USED;
#endif
        break;
    /************************/
    /* string ops */

    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        /* rep/repne both just repeat movs (no ZF test for this op) */
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;

    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        /* scas/cmps distinguish repz from repnz (ZF termination test) */
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        /* may fault with #GP if CPL > IOPL or the TSS I/O bitmap denies it */
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5130 } else {
5131 gen_outs(s, ot);
5132 }
5133 break;
5134
5135 /************************/
5136 /* port I/O */
5137    case 0xe4:
5138    case 0xe5:
        /* IN AL/eAX, imm8 -- low opcode bit selects byte vs. word/long width. */
5139        if ((b & 1) == 0)
5140            ot = OT_BYTE;
5141        else
5142            ot = dflag ? OT_LONG : OT_WORD;
        /* Fetch the immediate port number, then emit the IOPL / TSS I/O-bitmap
           permission check before the actual port read. */
5143        val = ldub_code(s->pc++);
5144        gen_op_movl_T0_im(val);
5145        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5146        gen_op_in[ot]();
5147        gen_op_mov_reg_T1[ot][R_EAX]();
5148        break;
5149    case 0xe6:
5150    case 0xe7:
        /* OUT imm8, AL/eAX -- same width selection and permission check as IN. */
5151        if ((b & 1) == 0)
5152            ot = OT_BYTE;
5153        else
5154            ot = dflag ? OT_LONG : OT_WORD;
5155        val = ldub_code(s->pc++);
5156        gen_op_movl_T0_im(val);
5157        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5158#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
        /* Port 0x80 is the legacy POST/diagnostic port, used by guests purely
           as an I/O delay; VirtualBox discards the write entirely for speed.
           Note this happens only after the permission check has been emitted,
           so a #GP for an unprivileged guest is still generated. */
5159        if (val == 0x80)
5160            break;
5161#endif /* VBOX */
5162        gen_op_mov_TN_reg[ot][1][R_EAX]();
5163        gen_op_out[ot]();
5164        break;
5165 case 0xec:
5166 case 0xed:
5167 if ((b & 1) == 0)
5168 ot = OT_BYTE;
5169 else
5170 ot = dflag ? OT_LONG : OT_WORD;
5171 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5172 gen_op_andl_T0_ffff();
5173 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5174 gen_op_in[ot]();
5175 gen_op_mov_reg_T1[ot][R_EAX]();
5176 break;
5177 case 0xee:
5178 case 0xef:
5179 if ((b & 1) == 0)
5180 ot = OT_BYTE;
5181 else
5182 ot = dflag ? OT_LONG : OT_WORD;
5183 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5184 gen_op_andl_T0_ffff();
5185 gen_check_io(s, ot, 0, pc_start - s->cs_base);
5186 gen_op_mov_TN_reg[ot][1][R_EAX]();
5187 gen_op_out[ot]();
5188 break;
5189
5190 /************************/
5191 /* control */
5192 case 0xc2: /* ret im */
5193 val = ldsw_code(s->pc);
5194 s->pc += 2;
5195 gen_pop_T0(s);
5196 if (CODE64(s) && s->dflag)
5197 s->dflag = 2;
5198 gen_stack_update(s, val + (2 << s->dflag));
5199 if (s->dflag == 0)
5200 gen_op_andl_T0_ffff();
5201 gen_op_jmp_T0();
5202 gen_eob(s);
5203 break;
5204 case 0xc3: /* ret */
5205 gen_pop_T0(s);
5206 gen_pop_update(s);
5207 if (s->dflag == 0)
5208 gen_op_andl_T0_ffff();
5209 gen_op_jmp_T0();
5210 gen_eob(s);
5211 break;
5212 case 0xca: /* lret im */
5213 val = ldsw_code(s->pc);
5214 s->pc += 2;
5215 do_lret:
5216 if (s->pe && !s->vm86) {
5217 if (s->cc_op != CC_OP_DYNAMIC)
5218 gen_op_set_cc_op(s->cc_op);
5219 gen_jmp_im(pc_start - s->cs_base);
5220 gen_op_lret_protected(s->dflag, val);
5221 } else {
5222 gen_stack_A0(s);
5223 /* pop offset */
5224 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5225 if (s->dflag == 0)
5226 gen_op_andl_T0_ffff();
5227 /* NOTE: keeping EIP updated is not a problem in case of
5228 exception */
5229 gen_op_jmp_T0();
5230 /* pop selector */
5231 gen_op_addl_A0_im(2 << s->dflag);
5232 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5233 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5234 /* add stack offset */
5235 gen_stack_update(s, val + (4 << s->dflag));
5236 }
5237 gen_eob(s);
5238 break;
5239 case 0xcb: /* lret */
5240 val = 0;
5241 goto do_lret;
5242    case 0xcf: /* iret */
        /* Three dispatch paths: real mode, vm86 mode, protected mode. */
5243        if (!s->pe) {
5244            /* real mode */
5245            gen_op_iret_real(s->dflag);
5246            s->cc_op = CC_OP_EFLAGS;
5247        } else if (s->vm86) {
5248#ifdef VBOX
            /* VBOX: with CR4.VME set, a 16-bit IRET is allowed in vm86 mode
               even when IOPL < 3 (VIF/VIP are virtualized by the helper);
               a 32-bit operand size (s->dflag != 0) still raises #GP.
               NOTE(review): matches the VME rule that only 16-bit IRET is
               virtualized -- confirm against the Intel SDM VME chapter. */
5249            if (s->iopl != 3 && (!s->vme || s->dflag)) {
5250#else
5251            if (s->iopl != 3) {
5252#endif
5253                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5254            } else {
5255                gen_op_iret_real(s->dflag);
5256                s->cc_op = CC_OP_EFLAGS;
5257            }
5258        } else {
            /* Protected mode: the helper needs up-to-date EIP and eflags,
               so flush the lazy condition codes and current PC first. */
5259            if (s->cc_op != CC_OP_DYNAMIC)
5260                gen_op_set_cc_op(s->cc_op);
5261            gen_jmp_im(pc_start - s->cs_base);
5262            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5263            s->cc_op = CC_OP_EFLAGS;
5264        }
        /* IRET changes EIP and possibly CPL/flags: end the translation block. */
5265        gen_eob(s);
5266        break;
5267 case 0xe8: /* call im */
5268 {
5269 if (dflag)
5270 tval = (int32_t)insn_get(s, OT_LONG);
5271 else
5272 tval = (int16_t)insn_get(s, OT_WORD);
5273 next_eip = s->pc - s->cs_base;
5274 tval += next_eip;
5275 if (s->dflag == 0)
5276 tval &= 0xffff;
5277 gen_movtl_T0_im(next_eip);
5278 gen_push_T0(s);
5279 gen_jmp(s, tval);
5280 }
5281 break;
5282 case 0x9a: /* lcall im */
5283 {
5284 unsigned int selector, offset;
5285
5286 if (CODE64(s))
5287 goto illegal_op;
5288 ot = dflag ? OT_LONG : OT_WORD;
5289 offset = insn_get(s, ot);
5290 selector = insn_get(s, OT_WORD);
5291
5292 gen_op_movl_T0_im(selector);
5293 gen_op_movl_T1_imu(offset);
5294 }
5295 goto do_lcall;
5296 case 0xe9: /* jmp im */
5297 if (dflag)
5298 tval = (int32_t)insn_get(s, OT_LONG);
5299 else
5300 tval = (int16_t)insn_get(s, OT_WORD);
5301 tval += s->pc - s->cs_base;
5302 if (s->dflag == 0)
5303 tval &= 0xffff;
5304 gen_jmp(s, tval);
5305 break;
5306 case 0xea: /* ljmp im */
5307 {
5308 unsigned int selector, offset;
5309
5310 if (CODE64(s))
5311 goto illegal_op;
5312 ot = dflag ? OT_LONG : OT_WORD;
5313 offset = insn_get(s, ot);
5314 selector = insn_get(s, OT_WORD);
5315
5316 gen_op_movl_T0_im(selector);
5317 gen_op_movl_T1_imu(offset);
5318 }
5319 goto do_ljmp;
5320 case 0xeb: /* jmp Jb */
5321 tval = (int8_t)insn_get(s, OT_BYTE);
5322 tval += s->pc - s->cs_base;
5323 if (s->dflag == 0)
5324 tval &= 0xffff;
5325 gen_jmp(s, tval);
5326 break;
5327 case 0x70 ... 0x7f: /* jcc Jb */
5328 tval = (int8_t)insn_get(s, OT_BYTE);
5329 goto do_jcc;
5330 case 0x180 ... 0x18f: /* jcc Jv */
5331 if (dflag) {
5332 tval = (int32_t)insn_get(s, OT_LONG);
5333 } else {
5334 tval = (int16_t)insn_get(s, OT_WORD);
5335 }
5336 do_jcc:
5337 next_eip = s->pc - s->cs_base;
5338 tval += next_eip;
5339 if (s->dflag == 0)
5340 tval &= 0xffff;
5341 gen_jcc(s, b, tval, next_eip);
5342 break;
5343
5344 case 0x190 ... 0x19f: /* setcc Gv */
5345 modrm = ldub_code(s->pc++);
5346 gen_setcc(s, b);
5347 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5348 break;
5349 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5350 ot = dflag + OT_WORD;
5351 modrm = ldub_code(s->pc++);
5352 reg = ((modrm >> 3) & 7) | rex_r;
5353 mod = (modrm >> 6) & 3;
5354 gen_setcc(s, b);
5355 if (mod != 3) {
5356 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5357 gen_op_ld_T1_A0[ot + s->mem_index]();
5358 } else {
5359 rm = (modrm & 7) | REX_B(s);
5360 gen_op_mov_TN_reg[ot][1][rm]();
5361 }
5362 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5363 break;
5364
5365 /************************/
5366 /* flags */
5367 case 0x9c: /* pushf */
5368#ifdef VBOX
5369 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5370#else
5371 if (s->vm86 && s->iopl != 3) {
5372#endif
5373 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5374 } else {
5375 if (s->cc_op != CC_OP_DYNAMIC)
5376 gen_op_set_cc_op(s->cc_op);
5377#ifdef VBOX
5378 if (s->vm86 && s->vme && s->iopl != 3)
5379 gen_op_movl_T0_eflags_vme();
5380 else
5381#endif
5382 gen_op_movl_T0_eflags();
5383 gen_push_T0(s);
5384 }
5385 break;
5386 case 0x9d: /* popf */
5387#ifdef VBOX
5388 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5389#else
5390 if (s->vm86 && s->iopl != 3) {
5391#endif
5392 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5393 } else {
5394 gen_pop_T0(s);
5395 if (s->cpl == 0) {
5396 if (s->dflag) {
5397 gen_op_movl_eflags_T0_cpl0();
5398 } else {
5399 gen_op_movw_eflags_T0_cpl0();
5400 }
5401 } else {
5402 if (s->cpl <= s->iopl) {
5403 if (s->dflag) {
5404 gen_op_movl_eflags_T0_io();
5405 } else {
5406 gen_op_movw_eflags_T0_io();
5407 }
5408 } else {
5409 if (s->dflag) {
5410 gen_op_movl_eflags_T0();
5411 } else {
5412#ifdef VBOX
5413 if (s->vm86 && s->vme)
5414 gen_op_movw_eflags_T0_vme();
5415 else
5416#endif
5417 gen_op_movw_eflags_T0();
5418 }
5419 }
5420 }
5421 gen_pop_update(s);
5422 s->cc_op = CC_OP_EFLAGS;
5423 /* abort translation because TF flag may change */
5424 gen_jmp_im(s->pc - s->cs_base);
5425 gen_eob(s);
5426 }
5427 break;
5428 case 0x9e: /* sahf */
5429 if (CODE64(s))
5430 goto illegal_op;
5431 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5432 if (s->cc_op != CC_OP_DYNAMIC)
5433 gen_op_set_cc_op(s->cc_op);
5434 gen_op_movb_eflags_T0();
5435 s->cc_op = CC_OP_EFLAGS;
5436 break;
5437 case 0x9f: /* lahf */
5438 if (CODE64(s))
5439 goto illegal_op;
5440 if (s->cc_op != CC_OP_DYNAMIC)
5441 gen_op_set_cc_op(s->cc_op);
5442 gen_op_movl_T0_eflags();
5443 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5444 break;
5445 case 0xf5: /* cmc */
5446 if (s->cc_op != CC_OP_DYNAMIC)
5447 gen_op_set_cc_op(s->cc_op);
5448 gen_op_cmc();
5449 s->cc_op = CC_OP_EFLAGS;
5450 break;
5451 case 0xf8: /* clc */
5452 if (s->cc_op != CC_OP_DYNAMIC)
5453 gen_op_set_cc_op(s->cc_op);
5454 gen_op_clc();
5455 s->cc_op = CC_OP_EFLAGS;
5456 break;
5457 case 0xf9: /* stc */
5458 if (s->cc_op != CC_OP_DYNAMIC)
5459 gen_op_set_cc_op(s->cc_op);
5460 gen_op_stc();
5461 s->cc_op = CC_OP_EFLAGS;
5462 break;
5463 case 0xfc: /* cld */
5464 gen_op_cld();
5465 break;
5466 case 0xfd: /* std */
5467 gen_op_std();
5468 break;
5469
5470 /************************/
5471 /* bit operations */
5472 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5473 ot = dflag + OT_WORD;
5474 modrm = ldub_code(s->pc++);
5475 op = (modrm >> 3) & 7;
5476 mod = (modrm >> 6) & 3;
5477 rm = (modrm & 7) | REX_B(s);
5478 if (mod != 3) {
5479 s->rip_offset = 1;
5480 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5481 gen_op_ld_T0_A0[ot + s->mem_index]();
5482 } else {
5483 gen_op_mov_TN_reg[ot][0][rm]();
5484 }
5485 /* load shift */
5486 val = ldub_code(s->pc++);
5487 gen_op_movl_T1_im(val);
5488 if (op < 4)
5489 goto illegal_op;
5490 op -= 4;
5491 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5492 s->cc_op = CC_OP_SARB + ot;
5493 if (op != 0) {
5494 if (mod != 3)
5495 gen_op_st_T0_A0[ot + s->mem_index]();
5496 else
5497 gen_op_mov_reg_T0[ot][rm]();
5498 gen_op_update_bt_cc();
5499 }
5500 break;
5501 case 0x1a3: /* bt Gv, Ev */
5502 op = 0;
5503 goto do_btx;
5504 case 0x1ab: /* bts */
5505 op = 1;
5506 goto do_btx;
5507 case 0x1b3: /* btr */
5508 op = 2;
5509 goto do_btx;
5510 case 0x1bb: /* btc */
5511 op = 3;
5512 do_btx:
5513 ot = dflag + OT_WORD;
5514 modrm = ldub_code(s->pc++);
5515 reg = ((modrm >> 3) & 7) | rex_r;
5516 mod = (modrm >> 6) & 3;
5517 rm = (modrm & 7) | REX_B(s);
5518 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5519 if (mod != 3) {
5520 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5521 /* specific case: we need to add a displacement */
5522 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5523 gen_op_ld_T0_A0[ot + s->mem_index]();
5524 } else {
5525 gen_op_mov_TN_reg[ot][0][rm]();
5526 }
5527 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5528 s->cc_op = CC_OP_SARB + ot;
5529 if (op != 0) {
5530 if (mod != 3)
5531 gen_op_st_T0_A0[ot + s->mem_index]();
5532 else
5533 gen_op_mov_reg_T0[ot][rm]();
5534 gen_op_update_bt_cc();
5535 }
5536 break;
5537 case 0x1bc: /* bsf */
5538 case 0x1bd: /* bsr */
5539 ot = dflag + OT_WORD;
5540 modrm = ldub_code(s->pc++);
5541 reg = ((modrm >> 3) & 7) | rex_r;
5542 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5543 /* NOTE: in order to handle the 0 case, we must load the
5544 result. It could be optimized with a generated jump */
5545 gen_op_mov_TN_reg[ot][1][reg]();
5546 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5547 gen_op_mov_reg_T1[ot][reg]();
5548 s->cc_op = CC_OP_LOGICB + ot;
5549 break;
5550 /************************/
5551 /* bcd */
5552 case 0x27: /* daa */
5553 if (CODE64(s))
5554 goto illegal_op;
5555 if (s->cc_op != CC_OP_DYNAMIC)
5556 gen_op_set_cc_op(s->cc_op);
5557 gen_op_daa();
5558 s->cc_op = CC_OP_EFLAGS;
5559 break;
5560 case 0x2f: /* das */
5561 if (CODE64(s))
5562 goto illegal_op;
5563 if (s->cc_op != CC_OP_DYNAMIC)
5564 gen_op_set_cc_op(s->cc_op);
5565 gen_op_das();
5566 s->cc_op = CC_OP_EFLAGS;
5567 break;
5568 case 0x37: /* aaa */
5569 if (CODE64(s))
5570 goto illegal_op;
5571 if (s->cc_op != CC_OP_DYNAMIC)
5572 gen_op_set_cc_op(s->cc_op);
5573 gen_op_aaa();
5574 s->cc_op = CC_OP_EFLAGS;
5575 break;
5576 case 0x3f: /* aas */
5577 if (CODE64(s))
5578 goto illegal_op;
5579 if (s->cc_op != CC_OP_DYNAMIC)
5580 gen_op_set_cc_op(s->cc_op);
5581 gen_op_aas();
5582 s->cc_op = CC_OP_EFLAGS;
5583 break;
5584 case 0xd4: /* aam */
5585 if (CODE64(s))
5586 goto illegal_op;
5587 val = ldub_code(s->pc++);
5588 if (val == 0) {
5589 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5590 } else {
5591 gen_op_aam(val);
5592 s->cc_op = CC_OP_LOGICB;
5593 }
5594 break;
5595 case 0xd5: /* aad */
5596 if (CODE64(s))
5597 goto illegal_op;
5598 val = ldub_code(s->pc++);
5599 gen_op_aad(val);
5600 s->cc_op = CC_OP_LOGICB;
5601 break;
5602 /************************/
5603 /* misc */
5604 case 0x90: /* nop */
5605 /* XXX: xchg + rex handling */
5606 /* XXX: correct lock test for all insn */
5607 if (prefixes & PREFIX_LOCK)
5608 goto illegal_op;
5609 break;
5610 case 0x9b: /* fwait */
5611 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5612 (HF_MP_MASK | HF_TS_MASK)) {
5613 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5614 } else {
5615 if (s->cc_op != CC_OP_DYNAMIC)
5616 gen_op_set_cc_op(s->cc_op);
5617 gen_jmp_im(pc_start - s->cs_base);
5618 gen_op_fwait();
5619 }
5620 break;
5621    case 0xcc: /* int3 */
5622#ifdef VBOX
        /* VBOX: in vm86 mode with IOPL < 3 and no VME, the software
           interrupt faults with #GP instead of vectoring.
           NOTE(review): real CPUs do not #GP on the one-byte INT3 (0xCC)
           in vm86 mode -- confirm this VirtualBox deviation is intended. */
5623        if (s->vm86 && s->iopl != 3 && !s->vme) {
5624            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5625        } else
5626#endif
5627        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5628        break;
5629    case 0xcd: /* int N */
        /* Software interrupt with an immediate vector byte. */
5630        val = ldub_code(s->pc++);
5631#ifdef VBOX
        /* VBOX: with VME, INT n may proceed in vm86 mode at IOPL < 3
           (interrupt redirection is handled in the helper); without VME
           it faults, matching the upstream QEMU branch below. */
5632        if (s->vm86 && s->iopl != 3 && !s->vme) {
5633#else
5634        if (s->vm86 && s->iopl != 3) {
5635#endif
5636            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5637        } else {
5638            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5639        }
5640        break;
5641 case 0xce: /* into */
5642 if (CODE64(s))
5643 goto illegal_op;
5644 if (s->cc_op != CC_OP_DYNAMIC)
5645 gen_op_set_cc_op(s->cc_op);
5646 gen_jmp_im(pc_start - s->cs_base);
5647 gen_op_into(s->pc - pc_start);
5648 break;
5649 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5650#if 1
5651 gen_debug(s, pc_start - s->cs_base);
5652#else
5653 /* start debug */
5654 tb_flush(cpu_single_env);
5655 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5656#endif
5657 break;
5658    case 0xfa: /* cli */
        /* Privilege rule: CPL <= IOPL in protected mode, IOPL == 3 in
           vm86 mode; anything else raises #GP. */
5659        if (!s->vm86) {
5660            if (s->cpl <= s->iopl) {
5661                gen_op_cli();
5662            } else {
5663                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5664            }
5665        } else {
5666            if (s->iopl == 3) {
5667                gen_op_cli();
5668#ifdef VBOX
            /* VBOX: with VME, CLI clears the virtual interrupt flag (VIF)
               via the helper instead of faulting.  The `s->iopl != 3`
               test is redundant here -- this branch is only reachable
               when iopl != 3 -- but kept byte-identical. */
5669            } else if (s->iopl != 3 && s->vme) {
5670                gen_op_cli_vme();
5671#endif
5672            } else {
5673                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5674            }
5675        }
5676        break;
5677    case 0xfb: /* sti */
5678        if (!s->vm86) {
5679            if (s->cpl <= s->iopl) {
            /* Label is a cross-case goto target (used by the vm86
               IOPL == 3 path below); do not move or rename. */
5680            gen_sti:
5681                gen_op_sti();
5682                /* interruptions are enabled only the first insn after sti */
5683                /* If several instructions disable interrupts, only the
5684                   _first_ does it */
5685                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5686                    gen_op_set_inhibit_irq();
5687                /* give a chance to handle pending irqs */
5688                gen_jmp_im(s->pc - s->cs_base);
5689                gen_eob(s);
5690            } else {
5691                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5692            }
5693        } else {
5694            if (s->iopl == 3) {
5695                goto gen_sti;
5696#ifdef VBOX
            /* VBOX: with VME, STI sets VIF via the helper; the TB is ended
               so a pending interrupt can be delivered afterwards.
               NOTE(review): unlike the gen_sti path this does not set the
               interrupt-inhibit flag for the following insn -- confirm
               whether that shadow is intentionally skipped for VME. */
5697            } else if (s->iopl != 3 && s->vme) {
5698                gen_op_sti_vme();
5699                /* give a chance to handle pending irqs */
5700                gen_jmp_im(s->pc - s->cs_base);
5701                gen_eob(s);
5702#endif
5703            } else {
5704                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5705            }
5706        }
5707        break;
5708 case 0x62: /* bound */
5709 if (CODE64(s))
5710 goto illegal_op;
5711 ot = dflag ? OT_LONG : OT_WORD;
5712 modrm = ldub_code(s->pc++);
5713 reg = (modrm >> 3) & 7;
5714 mod = (modrm >> 6) & 3;
5715 if (mod == 3)
5716 goto illegal_op;
5717 gen_op_mov_TN_reg[ot][0][reg]();
5718 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5719 gen_jmp_im(pc_start - s->cs_base);
5720 if (ot == OT_WORD)
5721 gen_op_boundw();
5722 else
5723 gen_op_boundl();
5724 break;
5725 case 0x1c8 ... 0x1cf: /* bswap reg */
5726 reg = (b & 7) | REX_B(s);
5727#ifdef TARGET_X86_64
5728 if (dflag == 2) {
5729 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5730 gen_op_bswapq_T0();
5731 gen_op_mov_reg_T0[OT_QUAD][reg]();
5732 } else
5733#endif
5734 {
5735 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5736 gen_op_bswapl_T0();
5737 gen_op_mov_reg_T0[OT_LONG][reg]();
5738 }
5739 break;
5740 case 0xd6: /* salc */
5741 if (CODE64(s))
5742 goto illegal_op;
5743 if (s->cc_op != CC_OP_DYNAMIC)
5744 gen_op_set_cc_op(s->cc_op);
5745 gen_op_salc();
5746 break;
5747 case 0xe0: /* loopnz */
5748 case 0xe1: /* loopz */
5749 if (s->cc_op != CC_OP_DYNAMIC)
5750 gen_op_set_cc_op(s->cc_op);
5751 /* FALL THRU */
5752 case 0xe2: /* loop */
5753 case 0xe3: /* jecxz */
5754 {
5755 int l1, l2;
5756
5757 tval = (int8_t)insn_get(s, OT_BYTE);
5758 next_eip = s->pc - s->cs_base;
5759 tval += next_eip;
5760 if (s->dflag == 0)
5761 tval &= 0xffff;
5762
5763 l1 = gen_new_label();
5764 l2 = gen_new_label();
5765 b &= 3;
5766 if (b == 3) {
5767 gen_op_jz_ecx[s->aflag](l1);
5768 } else {
5769 gen_op_dec_ECX[s->aflag]();
5770 if (b <= 1)
5771 gen_op_mov_T0_cc();
5772 gen_op_loop[s->aflag][b](l1);
5773 }
5774
5775 gen_jmp_im(next_eip);
5776 gen_op_jmp_label(l2);
5777 gen_set_label(l1);
5778 gen_jmp_im(tval);
5779 gen_set_label(l2);
5780 gen_eob(s);
5781 }
5782 break;
5783 case 0x130: /* wrmsr */
5784 case 0x132: /* rdmsr */
5785 if (s->cpl != 0) {
5786 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5787 } else {
5788 if (b & 2)
5789 gen_op_rdmsr();
5790 else
5791 gen_op_wrmsr();
5792 }
5793 break;
5794 case 0x131: /* rdtsc */
5795 gen_jmp_im(pc_start - s->cs_base);
5796 gen_op_rdtsc();
5797 break;
5798 case 0x134: /* sysenter */
5799 if (CODE64(s))
5800 goto illegal_op;
5801 if (!s->pe) {
5802 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5803 } else {
5804 if (s->cc_op != CC_OP_DYNAMIC) {
5805 gen_op_set_cc_op(s->cc_op);
5806 s->cc_op = CC_OP_DYNAMIC;
5807 }
5808 gen_jmp_im(pc_start - s->cs_base);
5809 gen_op_sysenter();
5810 gen_eob(s);
5811 }
5812 break;
5813 case 0x135: /* sysexit */
5814 if (CODE64(s))
5815 goto illegal_op;
5816 if (!s->pe) {
5817 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5818 } else {
5819 if (s->cc_op != CC_OP_DYNAMIC) {
5820 gen_op_set_cc_op(s->cc_op);
5821 s->cc_op = CC_OP_DYNAMIC;
5822 }
5823 gen_jmp_im(pc_start - s->cs_base);
5824 gen_op_sysexit();
5825 gen_eob(s);
5826 }
5827 break;
5828#ifdef TARGET_X86_64
5829 case 0x105: /* syscall */
5830 /* XXX: is it usable in real mode ? */
5831 if (s->cc_op != CC_OP_DYNAMIC) {
5832 gen_op_set_cc_op(s->cc_op);
5833 s->cc_op = CC_OP_DYNAMIC;
5834 }
5835 gen_jmp_im(pc_start - s->cs_base);
5836 gen_op_syscall(s->pc - pc_start);
5837 gen_eob(s);
5838 break;
5839 case 0x107: /* sysret */
5840 if (!s->pe) {
5841 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5842 } else {
5843 if (s->cc_op != CC_OP_DYNAMIC) {
5844 gen_op_set_cc_op(s->cc_op);
5845 s->cc_op = CC_OP_DYNAMIC;
5846 }
5847 gen_jmp_im(pc_start - s->cs_base);
5848 gen_op_sysret(s->dflag);
5849 /* condition codes are modified only in long mode */
5850 if (s->lma)
5851 s->cc_op = CC_OP_EFLAGS;
5852 gen_eob(s);
5853 }
5854 break;
5855#endif
5856 case 0x1a2: /* cpuid */
5857 gen_op_cpuid();
5858 break;
5859 case 0xf4: /* hlt */
5860 if (s->cpl != 0) {
5861 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5862 } else {
5863 if (s->cc_op != CC_OP_DYNAMIC)
5864 gen_op_set_cc_op(s->cc_op);
5865 gen_jmp_im(s->pc - s->cs_base);
5866 gen_op_hlt();
5867 s->is_jmp = 3;
5868 }
5869 break;
5870 case 0x100:
5871 modrm = ldub_code(s->pc++);
5872 mod = (modrm >> 6) & 3;
5873 op = (modrm >> 3) & 7;
5874 switch(op) {
5875 case 0: /* sldt */
5876 if (!s->pe || s->vm86)
5877 goto illegal_op;
5878 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5879 ot = OT_WORD;
5880 if (mod == 3)
5881 ot += s->dflag;
5882 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5883 break;
5884 case 2: /* lldt */
5885 if (!s->pe || s->vm86)
5886 goto illegal_op;
5887 if (s->cpl != 0) {
5888 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5889 } else {
5890 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5891 gen_jmp_im(pc_start - s->cs_base);
5892 gen_op_lldt_T0();
5893 }
5894 break;
5895 case 1: /* str */
5896 if (!s->pe || s->vm86)
5897 goto illegal_op;
5898 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5899 ot = OT_WORD;
5900 if (mod == 3)
5901 ot += s->dflag;
5902 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5903 break;
5904 case 3: /* ltr */
5905 if (!s->pe || s->vm86)
5906 goto illegal_op;
5907 if (s->cpl != 0) {
5908 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5909 } else {
5910 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5911 gen_jmp_im(pc_start - s->cs_base);
5912 gen_op_ltr_T0();
5913 }
5914 break;
5915 case 4: /* verr */
5916 case 5: /* verw */
5917 if (!s->pe || s->vm86)
5918 goto illegal_op;
5919 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5920 if (s->cc_op != CC_OP_DYNAMIC)
5921 gen_op_set_cc_op(s->cc_op);
5922 if (op == 4)
5923 gen_op_verr();
5924 else
5925 gen_op_verw();
5926 s->cc_op = CC_OP_EFLAGS;
5927 break;
5928 default:
5929 goto illegal_op;
5930 }
5931 break;
5932 case 0x101:
5933 modrm = ldub_code(s->pc++);
5934 mod = (modrm >> 6) & 3;
5935 op = (modrm >> 3) & 7;
5936 rm = modrm & 7;
5937 switch(op) {
5938 case 0: /* sgdt */
5939 if (mod == 3)
5940 goto illegal_op;
5941 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5942 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5943 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5944 gen_add_A0_im(s, 2);
5945 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5946 if (!s->dflag)
5947 gen_op_andl_T0_im(0xffffff);
5948 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5949 break;
5950 case 1:
5951 if (mod == 3) {
5952 switch (rm) {
5953 case 0: /* monitor */
5954 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5955 s->cpl != 0)
5956 goto illegal_op;
5957 gen_jmp_im(pc_start - s->cs_base);
5958#ifdef TARGET_X86_64
5959 if (s->aflag == 2) {
5960 gen_op_movq_A0_reg[R_EBX]();
5961 gen_op_addq_A0_AL();
5962 } else
5963#endif
5964 {
5965 gen_op_movl_A0_reg[R_EBX]();
5966 gen_op_addl_A0_AL();
5967 if (s->aflag == 0)
5968 gen_op_andl_A0_ffff();
5969 }
5970 gen_add_A0_ds_seg(s);
5971 gen_op_monitor();
5972 break;
5973 case 1: /* mwait */
5974 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5975 s->cpl != 0)
5976 goto illegal_op;
5977 if (s->cc_op != CC_OP_DYNAMIC) {
5978 gen_op_set_cc_op(s->cc_op);
5979 s->cc_op = CC_OP_DYNAMIC;
5980 }
5981 gen_jmp_im(s->pc - s->cs_base);
5982 gen_op_mwait();
5983 gen_eob(s);
5984 break;
5985 default:
5986 goto illegal_op;
5987 }
5988 } else { /* sidt */
5989 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5990 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5991 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5992 gen_add_A0_im(s, 2);
5993 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5994 if (!s->dflag)
5995 gen_op_andl_T0_im(0xffffff);
5996 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5997 }
5998 break;
5999 case 2: /* lgdt */
6000 case 3: /* lidt */
6001 if (mod == 3)
6002 goto illegal_op;
6003 if (s->cpl != 0) {
6004 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6005 } else {
6006 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6007 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
6008 gen_add_A0_im(s, 2);
6009 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
6010 if (!s->dflag)
6011 gen_op_andl_T0_im(0xffffff);
6012 if (op == 2) {
6013 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6014 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6015 } else {
6016 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6017 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6018 }
6019 }
6020 break;
6021 case 4: /* smsw */
6022 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6023 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6024 break;
6025 case 6: /* lmsw */
6026 if (s->cpl != 0) {
6027 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6028 } else {
6029 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6030 gen_op_lmsw_T0();
6031 gen_jmp_im(s->pc - s->cs_base);
6032 gen_eob(s);
6033 }
6034 break;
6035 case 7: /* invlpg */
6036 if (s->cpl != 0) {
6037 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6038 } else {
6039 if (mod == 3) {
6040#ifdef TARGET_X86_64
6041 if (CODE64(s) && rm == 0) {
6042 /* swapgs */
6043 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6044 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6045 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6046 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6047 } else
6048#endif
6049 {
6050 goto illegal_op;
6051 }
6052 } else {
6053 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6054 gen_op_invlpg_A0();
6055 gen_jmp_im(s->pc - s->cs_base);
6056 gen_eob(s);
6057 }
6058 }
6059 break;
6060 default:
6061 goto illegal_op;
6062 }
6063 break;
6064 case 0x108: /* invd */
6065 case 0x109: /* wbinvd */
6066 if (s->cpl != 0) {
6067 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6068 } else {
6069 /* nothing to do */
6070 }
6071 break;
6072 case 0x63: /* arpl or movslS (x86_64) */
6073#ifdef TARGET_X86_64
6074 if (CODE64(s)) {
6075 int d_ot;
6076 /* d_ot is the size of destination */
6077 d_ot = dflag + OT_WORD;
6078
6079 modrm = ldub_code(s->pc++);
6080 reg = ((modrm >> 3) & 7) | rex_r;
6081 mod = (modrm >> 6) & 3;
6082 rm = (modrm & 7) | REX_B(s);
6083
6084 if (mod == 3) {
6085 gen_op_mov_TN_reg[OT_LONG][0][rm]();
6086 /* sign extend */
6087 if (d_ot == OT_QUAD)
6088 gen_op_movslq_T0_T0();
6089 gen_op_mov_reg_T0[d_ot][reg]();
6090 } else {
6091 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6092 if (d_ot == OT_QUAD) {
6093 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
6094 } else {
6095 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6096 }
6097 gen_op_mov_reg_T0[d_ot][reg]();
6098 }
6099 } else
6100#endif
6101 {
6102 if (!s->pe || s->vm86)
6103 goto illegal_op;
6104 ot = dflag ? OT_LONG : OT_WORD;
6105 modrm = ldub_code(s->pc++);
6106 reg = (modrm >> 3) & 7;
6107 mod = (modrm >> 6) & 3;
6108 rm = modrm & 7;
6109#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
6110 gen_op_mov_TN_reg[ot][1][reg]();
6111#endif
6112 if (mod != 3) {
6113 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6114 gen_op_ld_T0_A0[ot + s->mem_index]();
6115 } else {
6116 gen_op_mov_TN_reg[ot][0][rm]();
6117 }
6118 if (s->cc_op != CC_OP_DYNAMIC)
6119 gen_op_set_cc_op(s->cc_op);
6120 gen_op_arpl();
6121 s->cc_op = CC_OP_EFLAGS;
6122 if (mod != 3) {
6123 gen_op_st_T0_A0[ot + s->mem_index]();
6124 } else {
6125 gen_op_mov_reg_T0[ot][rm]();
6126 }
6127 gen_op_arpl_update();
6128 }
6129 break;
6130 case 0x102: /* lar */
6131 case 0x103: /* lsl */
6132 if (!s->pe || s->vm86)
6133 goto illegal_op;
6134 ot = dflag ? OT_LONG : OT_WORD;
6135 modrm = ldub_code(s->pc++);
6136 reg = ((modrm >> 3) & 7) | rex_r;
6137 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6138 gen_op_mov_TN_reg[ot][1][reg]();
6139 if (s->cc_op != CC_OP_DYNAMIC)
6140 gen_op_set_cc_op(s->cc_op);
6141 if (b == 0x102)
6142 gen_op_lar();
6143 else
6144 gen_op_lsl();
6145 s->cc_op = CC_OP_EFLAGS;
6146 gen_op_mov_reg_T1[ot][reg]();
6147 break;
6148 case 0x118:
6149 modrm = ldub_code(s->pc++);
6150 mod = (modrm >> 6) & 3;
6151 op = (modrm >> 3) & 7;
6152 switch(op) {
6153 case 0: /* prefetchnta */
6154 case 1: /* prefetchnt0 */
6155 case 2: /* prefetchnt0 */
6156 case 3: /* prefetchnt0 */
6157 if (mod == 3)
6158 goto illegal_op;
6159 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6160 /* nothing more to do */
6161 break;
6162 default: /* nop (multi byte) */
6163 gen_nop_modrm(s, modrm);
6164 break;
6165 }
6166 break;
6167 case 0x119 ... 0x11f: /* nop (multi byte) */
6168 modrm = ldub_code(s->pc++);
6169 gen_nop_modrm(s, modrm);
6170 break;
6171 case 0x120: /* mov reg, crN */
6172 case 0x122: /* mov crN, reg */
6173 if (s->cpl != 0) {
6174 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6175 } else {
6176 modrm = ldub_code(s->pc++);
6177 if ((modrm & 0xc0) != 0xc0)
6178 goto illegal_op;
6179 rm = (modrm & 7) | REX_B(s);
6180 reg = ((modrm >> 3) & 7) | rex_r;
6181 if (CODE64(s))
6182 ot = OT_QUAD;
6183 else
6184 ot = OT_LONG;
6185 switch(reg) {
6186 case 0:
6187 case 2:
6188 case 3:
6189 case 4:
6190 case 8:
6191 if (b & 2) {
6192 gen_op_mov_TN_reg[ot][0][rm]();
6193 gen_op_movl_crN_T0(reg);
6194 gen_jmp_im(s->pc - s->cs_base);
6195 gen_eob(s);
6196 } else {
6197#if !defined(CONFIG_USER_ONLY)
6198 if (reg == 8)
6199 gen_op_movtl_T0_cr8();
6200 else
6201#endif
6202 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6203 gen_op_mov_reg_T0[ot][rm]();
6204 }
6205 break;
6206 default:
6207 goto illegal_op;
6208 }
6209 }
6210 break;
6211 case 0x121: /* mov reg, drN */
6212 case 0x123: /* mov drN, reg */
6213 if (s->cpl != 0) {
6214 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6215 } else {
6216 modrm = ldub_code(s->pc++);
6217 if ((modrm & 0xc0) != 0xc0)
6218 goto illegal_op;
6219 rm = (modrm & 7) | REX_B(s);
6220 reg = ((modrm >> 3) & 7) | rex_r;
6221 if (CODE64(s))
6222 ot = OT_QUAD;
6223 else
6224 ot = OT_LONG;
6225 /* XXX: do it dynamically with CR4.DE bit */
6226 if (reg == 4 || reg == 5 || reg >= 8)
6227 goto illegal_op;
6228 if (b & 2) {
6229 gen_op_mov_TN_reg[ot][0][rm]();
6230 gen_op_movl_drN_T0(reg);
6231 gen_jmp_im(s->pc - s->cs_base);
6232 gen_eob(s);
6233 } else {
6234 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6235 gen_op_mov_reg_T0[ot][rm]();
6236 }
6237 }
6238 break;
6239 case 0x106: /* clts */
6240 if (s->cpl != 0) {
6241 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6242 } else {
6243 gen_op_clts();
6244 /* abort block because static cpu state changed */
6245 gen_jmp_im(s->pc - s->cs_base);
6246 gen_eob(s);
6247 }
6248 break;
6249 /* MMX/SSE/SSE2/PNI support */
6250 case 0x1c3: /* MOVNTI reg, mem */
6251 if (!(s->cpuid_features & CPUID_SSE2))
6252 goto illegal_op;
6253 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6254 modrm = ldub_code(s->pc++);
6255 mod = (modrm >> 6) & 3;
6256 if (mod == 3)
6257 goto illegal_op;
6258 reg = ((modrm >> 3) & 7) | rex_r;
6259 /* generate a generic store */
6260 gen_ldst_modrm(s, modrm, ot, reg, 1);
6261 break;
6262 case 0x1ae:
6263 modrm = ldub_code(s->pc++);
6264 mod = (modrm >> 6) & 3;
6265 op = (modrm >> 3) & 7;
6266 switch(op) {
6267 case 0: /* fxsave */
6268 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6269 (s->flags & HF_EM_MASK))
6270 goto illegal_op;
6271 if (s->flags & HF_TS_MASK) {
6272 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6273 break;
6274 }
6275 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6276 gen_op_fxsave_A0((s->dflag == 2));
6277 break;
6278 case 1: /* fxrstor */
6279 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6280 (s->flags & HF_EM_MASK))
6281 goto illegal_op;
6282 if (s->flags & HF_TS_MASK) {
6283 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6284 break;
6285 }
6286 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6287 gen_op_fxrstor_A0((s->dflag == 2));
6288 break;
6289 case 2: /* ldmxcsr */
6290 case 3: /* stmxcsr */
6291 if (s->flags & HF_TS_MASK) {
6292 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6293 break;
6294 }
6295 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6296 mod == 3)
6297 goto illegal_op;
6298 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6299 if (op == 2) {
6300 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6301 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6302 } else {
6303 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6304 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6305 }
6306 break;
6307 case 5: /* lfence */
6308 case 6: /* mfence */
6309 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6310 goto illegal_op;
6311 break;
6312 case 7: /* sfence / clflush */
6313 if ((modrm & 0xc7) == 0xc0) {
6314 /* sfence */
6315 if (!(s->cpuid_features & CPUID_SSE))
6316 goto illegal_op;
6317 } else {
6318 /* clflush */
6319 if (!(s->cpuid_features & CPUID_CLFLUSH))
6320 goto illegal_op;
6321 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6322 }
6323 break;
6324 default:
6325 goto illegal_op;
6326 }
6327 break;
6328 case 0x10d: /* prefetch */
6329 modrm = ldub_code(s->pc++);
6330 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6331 /* ignore for now */
6332 break;
6333 case 0x1aa: /* rsm */
6334 if (!(s->flags & HF_SMM_MASK))
6335 goto illegal_op;
6336 if (s->cc_op != CC_OP_DYNAMIC) {
6337 gen_op_set_cc_op(s->cc_op);
6338 s->cc_op = CC_OP_DYNAMIC;
6339 }
6340 gen_jmp_im(s->pc - s->cs_base);
6341 gen_op_rsm();
6342 gen_eob(s);
6343 break;
6344 case 0x110 ... 0x117:
6345 case 0x128 ... 0x12f:
6346 case 0x150 ... 0x177:
6347 case 0x17c ... 0x17f:
6348 case 0x1c2:
6349 case 0x1c4 ... 0x1c6:
6350 case 0x1d0 ... 0x1fe:
6351 gen_sse(s, b, pc_start, rex_r);
6352 break;
6353 default:
6354 goto illegal_op;
6355 }
6356 /* lock generation */
6357 if (s->prefix & PREFIX_LOCK)
6358 gen_op_unlock();
6359 return s->pc;
6360 illegal_op:
6361 if (s->prefix & PREFIX_LOCK)
6362 gen_op_unlock();
6363 /* XXX: ensure that no lock was generated */
6364 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6365 return s->pc;
6366}
6367
6368#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6369#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6370
/* flags read by an operation */
/* Indexed by micro-op number (INDEX_op_*); each entry is the set of
   EFLAGS condition-code bits (CC_*) the op consumes.  Ops not listed
   read no flags (entry stays 0).  optimize_flags() walks the op stream
   backward and uses this table to compute flag liveness. */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps specialized on a preceding sub: each reads only
       the bits its x86 condition tests */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* SETcc variants reading the lazily-computed cc state */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    /* (marking ECX-test jumps as reading all flags keeps live flags
       from being optimized away across the rep-string loop head) */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rotate-through-carry consume the carry flag; expanded
   once per memory-access suffix variant below */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    /* register-only and raw-memory variants always exist; kernel/user
       variants only when softmmu user/kernel splitting is compiled in */
    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6503
/* flags written by an operation */
/* Indexed by micro-op number (INDEX_op_*); each entry is the set of
   EFLAGS bits (CC_*) the op defines/clobbers.  optimize_flags() kills
   these bits from the live set when scanning backward, and may replace
   the op by its opc_simpler[] variant when none of them is live. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* eflags loads: the byte form (SAHF-style) cannot touch OF */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* flag-writing ALU/shift ops, expanded once per memory-access suffix
   variant below */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    /* register-only and raw-memory variants always exist; kernel/user
       variants only when softmmu user/kernel splitting is compiled in */
    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6654
/* simpler form of an operation if no flags need to be generated */
/* Maps each flag-producing micro-op to a cheaper variant that skips
   the flag computation.  Entries left 0 are filled in by
   optimize_flags_init() with the identity mapping (op maps to itself). */
static uint16_t opc_simpler[NB_OPS] = {
    /* the cc-update ops become pure no-ops when their flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: drop the _cc suffix variant that computes flags */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotates have per-memory-access-suffix variants; expand them all */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6698
6699void optimize_flags_init(void)
6700{
6701 int i;
6702 /* put default values in arrays */
6703 for(i = 0; i < NB_OPS; i++) {
6704 if (opc_simpler[i] == 0)
6705 opc_simpler[i] = i;
6706 }
6707}
6708
6709/* CPU flags computation optimization: we move backward thru the
6710 generated code to see which flags are needed. The operation is
6711 modified if suitable */
6712static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6713{
6714 uint16_t *opc_ptr;
6715 int live_flags, write_flags, op;
6716
6717 opc_ptr = opc_buf + opc_buf_len;
6718 /* live_flags contains the flags needed by the next instructions
6719 in the code. At the end of the bloc, we consider that all the
6720 flags are live. */
6721 live_flags = CC_OSZAPC;
6722 while (opc_ptr > opc_buf) {
6723 op = *--opc_ptr;
6724 /* if none of the flags written by the instruction is used,
6725 then we can try to find a simpler instruction */
6726 write_flags = opc_write_flags[op];
6727 if ((live_flags & write_flags) == 0) {
6728 *opc_ptr = opc_simpler[op];
6729 }
6730 /* compute the live flags before the instruction */
6731 live_flags &= ~write_flags;
6732 live_flags |= opc_read_flags[op];
6733 }
6734}
6735
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
/* Returns 0.  search_pc mode additionally fills gen_opc_pc /
   gen_opc_cc_op / gen_opc_instr_start so a host PC can later be mapped
   back to a guest PC (used for precise exception restart). */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the static CPU state (hflags/eflags snapshot) into the
       per-block disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX_WITH_CALL_RECORD
    /* VBox: record guest calls only for 32-bit paged code running with
       interrupts disabled outside raw ring-0 */
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    if (    !(env->state & CPU_RAW_RING0)
        &&  (env->cr[0] & CR0_PG_MASK)
        &&  !(env->eflags & X86_EFL_IF)
        &&  dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* mem_index encodes the softmmu access variant: 0 = direct (raw),
       1*4 = kernel, 2*4 = user -- presumably scaled by the number of
       access-size variants; TODO confirm against the op tables */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is disabled when single-stepping or when an
       interrupt-shadow is pending, since each instruction must be able
       to end the block */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for every emitted micro-op slot,
               zero-filling slots between instruction starts */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VBox: single-instruction emulation mode ends the block after
           the first instruction */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        /* (buffer-full check plus a page-size bound so the block never
           spans past a page boundary by too much) */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* in search_pc mode tb->size is left untouched (the TB already has
       its size from the original translation) */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6927
/* Translate one basic block normally (no per-op guest-PC tracking). */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
6932
/* Re-translate a block with per-op guest-PC tracking enabled, filling
   the gen_opc_* side tables (used to restore guest state precisely). */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
6937
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette